diff --git a/.gitignore b/.gitignore index 426bc1ae..870b9e2e 100644 --- a/.gitignore +++ b/.gitignore @@ -28,6 +28,7 @@ coverage/ # Python .venv/ __pycache__/ +*.egg-info/ # pytest .coverage diff --git a/.viperlightignore b/.viperlightignore index 3ad45cc1..58a8b473 100644 --- a/.viperlightignore +++ b/.viperlightignore @@ -1,24 +1,5 @@ -# CDK files - files produced/installed by CDK -# js files produced by npm -.*.js -# jest.config -.*.d.ts -.*/node_modules/ -.cdk.staging -.*/package-lock.json -# Developer work files - not present in pipeline -deployment/build/ -deployment/temp/ -deployment/global-s3-assets/ -deployment/regional-s3-assets/ -deployment/open-source/ -deployment/test/ -# ignore own scan file -vlscan.json -# Ignore repo Config -Config -.venv +node_modules/ +.venv/ [python-pipoutdated] boto3=1.20.32 # Should match Lambda runtime: https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html -py-partiql-parser=0.1.0 # trust moto to resolve its own dependencies diff --git a/.viperlightrc b/.viperlightrc index 5893f191..f8085488 100644 --- a/.viperlightrc +++ b/.viperlightrc @@ -1,4 +1 @@ -{ - "failOn": "medium", - "all": true -} +{"failOn":"low","all":true} \ No newline at end of file diff --git a/.viperlightrc_global b/.viperlightrc_global new file mode 100644 index 00000000..808a84ec --- /dev/null +++ b/.viperlightrc_global @@ -0,0 +1,15 @@ +{ + "all": true, + "failOn": "high", + "safeAccounts": [ + "297356227824", + "193023089310", + "023102451235", + "797873946194", + "017000801446" + ], + "safeEmails": [ + "abc@example.co-m", + "abc@example.co.in" + ] +} diff --git a/AWSSD-DevNotes.md b/AWSSD-DevNotes.md index 28169abf..1dfbc1bc 100644 --- a/AWSSD-DevNotes.md +++ b/AWSSD-DevNotes.md @@ -8,10 +8,10 @@ ### SSM Parameters There are N parameters that control processing under /Solutions/SO0111: -CMK_ARN - encryption key for the AFSBP runbooks +CMK_ARN - encryption key for the AWS FSBP runbooks CMK_ARN - Admin account only, KMS key for solution 
encryption SNS_Topic_Arn - arn of the SHARR topic -sendAnonymousMetrics - controls whether the solution sends metrics +sendAnonymizedMetrics - controls whether the solution sends metrics version - solution version The following are set by each Security Standard's playbook, enabling remediation mapping in the step function: diff --git a/AWSSD-README.md b/AWSSD-README.md index fd566018..36638adc 100644 --- a/AWSSD-README.md +++ b/AWSSD-README.md @@ -78,8 +78,8 @@ Security is **Job 0**. SHARR Runbooks must be tightly secured, validate inputs, **Remediation Runbooks** are AWS-owned or SHARR-owned runbooks that perform a single remediation or remediation step for a specific resource. For example, creating a logging bucket, enabling an AWS Service, or setting a parameter on an AWS Service. The permissions to the service APIs are within the definition of the Remediation Runbook; SHARR Runbooks must be allowed to assume the remediation role. -A playbook is a set of remediations within a Security Standard (ex. "CIS", "AFSBP"). Each Playbook has a standard-specific Step Function ("Orchestrator") that "understands" the JSON format of that standard's Finding data. The Orchestrator does the following: -1. Verify the finding data matches the Standard (ex. CIS, PCI, AFSBP) +A playbook is a set of remediations within a Security Standard (ex. "CIS", "FSBP"). Each Playbook has a standard-specific Step Function ("Orchestrator") that "understands" the JSON format of that standard's Finding data. The Orchestrator does the following: +1. Verify the finding data matches the Standard (ex. CIS, PCI, FSBP) 2. Identify the control id and target account in the JSON data 3. Derive the runbook name (SHARR-\-\-\) 4. Check the status of the runbook in the target account @@ -104,7 +104,7 @@ A sample Playbook is provided as a starting point. 
The estimated time to create * **StandardControlArn**: arn:aws:securityhub:us-east-1:111111111111:control/aws-foundational-security-best-practices/v/1.0.0/RDS.1 * **securityStandardLongName**: aws-foundational-security-best-practices - * **securityStandardShortName**: AFSBP (can be any value you choose) + * **securityStandardShortName**: FSBP (can be any value you choose) * **version**: 1.0.0 For the following example, we will create a PCI DSS v3.2.1 Playbook: @@ -172,7 +172,7 @@ A sample Playbook is provided as a starting point. The estimated time to create const DIST_OUTPUT_BUCKET = process.env['DIST_OUTPUT_BUCKET'] || '%%BUCKET%%'; const DIST_SOLUTION_NAME = process.env['DIST_SOLUTION_NAME'] || '%%SOLUTION%%'; - const standardShortName = 'AFSBP' + const standardShortName = 'FSBP' const standardLongName = 'aws-foundational-security-best-practices' const standardVersion = '1.0.0' // DO NOT INCLUDE 'V' const RESOURCE_PREFIX = SOLUTION_ID.replace(/^DEV-/,''); // prefix on every resource name @@ -195,11 +195,11 @@ A sample Playbook is provided as a starting point. The estimated time to create 'Lambda.1', 'RDS.1', 'RDS.6', - 'RDS.7' - // 'S3.1' + 'RDS.7', + 'S3.9' ] - const adminStack = new PlaybookPrimaryStack(app, 'AFSBPStack', { + const adminStack = new PlaybookPrimaryStack(app, 'FSBPStack', { description: `(${SOLUTION_ID}P) ${SOLUTION_NAME} ${standardShortName} ${standardVersion} Compliance Pack - Admin Account, ${DIST_VERSION}`, solutionId: SOLUTION_ID, solutionVersion: DIST_VERSION, @@ -211,7 +211,7 @@ A sample Playbook is provided as a starting point. 
The estimated time to create securityStandardVersion: standardVersion }); - const memberStack = new PlaybookMemberStack(app, 'AFSBPMemberStack', { + const memberStack = new PlaybookMemberStack(app, 'FSBPMemberStack', { description: `(${SOLUTION_ID}C) ${SOLUTION_NAME} ${standardShortName} ${standardVersion} Compliance Pack - Member Account, ${DIST_VERSION}`, solutionId: SOLUTION_ID, solutionVersion: DIST_VERSION, @@ -229,7 +229,7 @@ A sample Playbook is provided as a starting point. The estimated time to create 5. Update test/pci321_stack.test.ts 6. Update cdk.json to point to the new bin/\*.ts name 7. ssmdocs/scripts parse script: the example should work for most Standards. Review what it does and make any adjustments. -8. Update the test script for the parse script. Copy finding json for the Security Standard to use in the test. See AFSBP, CIS for examples. +8. Update the test script for the parse script. Copy finding json for the Security Standard to use in the test. See FSBP, CIS for examples. 10. Create the ssmdocs for each control in the ssmdocs folder. This is the runbook that is invoked directly by the Orchestrator. 11. Update support.txt, README.md, description.txt 12. Add the Playbook to source/jest.config.js @@ -532,12 +532,12 @@ The Markdown in the Description for each SSM Document is displayed in the consol SHARR-__ ``` -* **standard**: abbreviation for the Security Standard. 
The abbreviation is set in an SSM Parameter, /**/Solutions/SO0111/// /dev/null -find $dist_dir -iname "dist" -type d -exec rm -r "{}" \; 2> /dev/null -find $dist_dir -type f -name 'package-lock.json' -delete -if [ -d $dist_dir/source/coverage ]; then - echo ---- Remove Coverage reports - do_cmd rm -rf $dist_dir/source/coverage -fi -cleanup_ts $dist_dir - -deldirs=("idna*" "pytz*" "urllib*" "certifi*" "charset*" "requests*" "__pycache__" "normalizer" ".pytest_cache" "cdk.out") -echo ---- Clean up unwanted directories -for dir_to_delete in "${deldirs[@]}" - do - echo ---- $dir_to_delete - prune_dir $dir_to_delete - done - -delfiles=(".coveragerc" ".DS_Store") -for file_to_delete in "${delfiles[@]}"; - do - echo "---- Removing file: $file_to_delete everywhere" - find $dist_dir -type f -name $file_to_delete | while read file - do - do_cmd rm $file - done - done - -echo "------------------------------------------------------------------------------" -echo "[Packing] Create GitHub (open-source) zip file" -echo "------------------------------------------------------------------------------" -cd $dist_dir -do_cmd zip -q -r9 ../${SOLUTION_TRADEMARKEDNAME}.zip * .gitignore .github -echo "Clean up open-source folder" -do_cmd rm -rf * -do_cmd rm .gitignore -do_cmd rm -r .github -do_cmd mv ../${SOLUTION_TRADEMARKEDNAME}.zip . -echo "Completed building ${SOLUTION_TRADEMARKEDNAME}.zip dist" +main "$@" diff --git a/deployment/build-s3-dist.sh b/deployment/build-s3-dist.sh index 4290a41b..35050e21 100755 --- a/deployment/build-s3-dist.sh +++ b/deployment/build-s3-dist.sh @@ -1,27 +1,27 @@ #!/usr/bin/env bash # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -[[ $TRACE ]] && set -x -set -eo pipefail +[[ $DEBUG ]] && set -x +set -eu -o pipefail header() { - declare text=$1 - echo "------------------------------------------------------------------------------" - echo "$text" - echo "------------------------------------------------------------------------------" + declare text=$1 + echo "------------------------------------------------------------------------------" + echo "$text" + echo "------------------------------------------------------------------------------" } usage() { - echo "Usage: $0 -b [-v ] [-t]" - echo "Version must be provided via a parameter or ../version.txt. Others are optional." - echo "-t indicates this is a pre-prod build and instructs the build to use a non-prod Solution ID, DEV-SOxxxx" - echo "Production example: ./build-s3-dist.sh -b solutions -v v1.0.0" - echo "Dev example: ./build-s3-dist.sh -b solutions -v v1.0.0 -t" + echo "Usage: $0 -b [-v ] [-t]" + echo "Version must be provided via a parameter or ../version.txt. Others are optional." 
+ echo "-t indicates this is a pre-prod build and instructs the build to use a non-prod Solution ID, DEV-SOxxxx" + echo "Production example: ./build-s3-dist.sh -b solutions -v v1.0.0" + echo "Dev example: ./build-s3-dist.sh -b solutions -v v1.0.0 -t" } clean() { - declare clean_dirs=("$@") - for dir in ${clean_dirs[@]}; do rm -rf "$dir"; done + declare clean_dirs=("$@") + for dir in ${clean_dirs[@]}; do rm -rf "$dir"; done } # This assumes all of the OS-level configuration has been completed and git repo has already been cloned @@ -40,139 +40,147 @@ clean() { # # - version-code: version of the package main() { - local root_dir=$(dirname "$(cd -P -- "$(dirname "$0")" && pwd -P)") - local template_dir="$root_dir"/deployment - local template_dist_dir="$template_dir"/global-s3-assets - local build_dist_dir="$template_dir/"regional-s3-assets - local source_dir="$root_dir"/source - local temp_work_dir="${template_dir}"/temp - - local clean_dirs=("$template_dist_dir" "$build_dist_dir" "$temp_work_dir") - - while getopts ":b:v:tch" opt; - do - case "${opt}" in - b) local bucket=${OPTARG};; - v) local version=${OPTARG};; - t) local devtest=1;; - c) clean "${clean_dirs[@]}" && exit 0;; - *) usage && exit 0;; - esac - done - - if [[ -z "$version" ]]; then - usage && exit 1 - fi - - # Prepend version with "v" if it does not already start with "v" - if [[ $version != v* ]]; then - version=v"$version" - fi - - clean "${clean_dirs[@]}" - - # Save in environmental variables to simplify builds (?) - echo "export DIST_OUTPUT_BUCKET=$bucket" > "$template_dir"/setenv.sh - echo "export DIST_VERSION=$version" >> "$template_dir"/setenv.sh - - if [[ ! -e "$template_dir"/solution_env.sh ]]; then - echo "solution_env.sh is missing from the solution root." 
&& exit 1 - fi - - source "$template_dir"/solution_env.sh - - if [[ -z "$SOLUTION_ID" ]] || [[ -z "$SOLUTION_NAME" ]] || [[ -z "$SOLUTION_TRADEMARKEDNAME" ]]; then - echo "Missing one of SOLUTION_ID, SOLUTION_NAME, or SOLUTION_TRADEMARKEDNAME from solution_env.sh" && exit 1 - fi - - if [[ ! -z $devtest ]]; then - SOLUTION_ID=DEV-$SOLUTION_ID - fi - export SOLUTION_ID - export SOLUTION_NAME - export SOLUTION_TRADEMARKEDNAME - - echo "export DIST_SOLUTION_NAME=$SOLUTION_TRADEMARKEDNAME" >> ./setenv.sh - - source "$template_dir"/setenv.sh - - header "Building $SOLUTION_NAME ($SOLUTION_ID) version $version for bucket $bucket" - - header "[Init] Create folders" - mkdir -p "$template_dist_dir" - mkdir -p "$build_dist_dir" - mkdir -p "$temp_work_dir" - mkdir -p "$build_dist_dir"/lambda - mkdir -p "$template_dist_dir"/playbooks - - header "[Pack] Lambda Layer (used by playbooks)" - - pushd "$temp_work_dir" - mkdir -p "$temp_work_dir"/source/solution_deploy/lambdalayer/python - cp "$source_dir"/LambdaLayers/*.py "$temp_work_dir"/source/solution_deploy/lambdalayer/python - popd - - pushd "$temp_work_dir"/source/solution_deploy/lambdalayer - zip --recurse-paths "$build_dist_dir"/lambda/layer.zip python - popd - - header "[Pack] Custom Action Lambda" - - pushd "$source_dir"/solution_deploy/source - zip ${build_dist_dir}/lambda/action_target_provider.zip action_target_provider.py cfnresponse.py - popd - - header "[Pack] Wait Provider Lambda" - - pushd "$source_dir"/solution_deploy/source - zip ${build_dist_dir}/lambda/wait_provider.zip wait_provider.py cfnresponse.py - - header "[Pack] Orchestrator Lambdas" - - pushd "$source_dir"/Orchestrator - ls | while read file; do - if [ ! 
-d $file ]; then - zip "$build_dist_dir"/lambda/"$file".zip "$file" - fi - done - popd - - header "[Create] Playbooks" - - for playbook in $(ls "$source_dir"/playbooks); do - if [ $playbook == 'NEWPLAYBOOK' ] || [ $playbook == '.coverage' ] || [ $playbook == 'common' ]; then - continue - fi - echo Create $playbook playbook - pushd "$source_dir"/playbooks/"$playbook" - npx cdk synth - cd cdk.out - for template in $(ls *.template.json); do - cp "$template" "$template_dist_dir"/playbooks/${template%.json} - done - popd - done - - header "[Create] Deployment Templates" + local root_dir=$(dirname "$(cd -P -- "$(dirname "$0")" && pwd -P)") + local template_dir="$root_dir"/deployment + local template_dist_dir="$template_dir"/global-s3-assets + local build_dist_dir="$template_dir/"regional-s3-assets + local source_dir="$root_dir"/source + local temp_work_dir="${template_dir}"/temp + local devtest="" - pushd "$source_dir"/solution_deploy + local clean_dirs=("$template_dist_dir" "$build_dist_dir" "$temp_work_dir") - npx cdk synth - cd cdk.out - for template in $(ls *.template.json); do - cp "$template" "$template_dist_dir"/${template%.json} - done - popd + while getopts ":b:v:tch" opt; + do + case "${opt}" in + b) local bucket=${OPTARG};; + v) local version=${OPTARG};; + t) devtest=1;; + c) clean "${clean_dirs[@]}" && exit 0;; + *) usage && exit 0;; + esac + done + + if [[ -z "$version" ]]; then + usage && exit 1 + fi - [ -e "$template_dir"/*.template ] && cp "$template_dir"/*.template "$template_dist_dir"/ + # Prepend version with "v" if it does not already start with "v" + if [[ $version != v* ]]; then + version=v"$version" + fi + + clean "${clean_dirs[@]}" + + # Save in environmental variables to simplify builds (?) + echo "export DIST_OUTPUT_BUCKET=$bucket" > "$template_dir"/setenv.sh + echo "export DIST_VERSION=$version" >> "$template_dir"/setenv.sh + + if [[ ! -e "$template_dir"/solution_env.sh ]]; then + echo "solution_env.sh is missing from the solution root." 
&& exit 1 + fi + + source "$template_dir"/solution_env.sh + + if [[ -z "$SOLUTION_ID" ]] || [[ -z "$SOLUTION_NAME" ]] || [[ -z "$SOLUTION_TRADEMARKEDNAME" ]]; then + echo "Missing one of SOLUTION_ID, SOLUTION_NAME, or SOLUTION_TRADEMARKEDNAME from solution_env.sh" && exit 1 + fi + + if [[ ! -z $devtest ]]; then + SOLUTION_ID=DEV-$SOLUTION_ID + fi + export SOLUTION_ID + export SOLUTION_NAME + export SOLUTION_TRADEMARKEDNAME + + echo "export DIST_SOLUTION_NAME=$SOLUTION_TRADEMARKEDNAME" >> ./setenv.sh + + source "$template_dir"/setenv.sh + + header "Building $SOLUTION_NAME ($SOLUTION_ID) version $version for bucket $bucket" + + header "[Init] Create folders" + mkdir -p "$template_dist_dir" + mkdir -p "$build_dist_dir" + mkdir -p "$temp_work_dir" + mkdir -p "$build_dist_dir"/lambda + mkdir -p "$template_dist_dir"/playbooks + + header "[Pack] Lambda Layer (used by playbooks)" + + pushd "$temp_work_dir" + mkdir -p "$temp_work_dir"/source/solution_deploy/lambdalayer/python/layer + cp "$source_dir"/layer/*.py "$temp_work_dir"/source/solution_deploy/lambdalayer/python/layer + popd + + pushd "$temp_work_dir"/source/solution_deploy/lambdalayer + zip --recurse-paths "$build_dist_dir"/lambda/layer.zip python + popd + + header "[Pack] Custom Action Lambda" + + pushd "$source_dir"/solution_deploy/source + zip ${build_dist_dir}/lambda/action_target_provider.zip action_target_provider.py cfnresponse.py + popd + + header "[Pack] Deployment Metrics Custom Action Lambda" + + pushd "$source_dir"/solution_deploy/source + zip ${build_dist_dir}/lambda/deployment_metrics_custom_resource.zip deployment_metrics_custom_resource.py cfnresponse.py + popd + + + header "[Pack] Wait Provider Lambda" + + pushd "$source_dir"/solution_deploy/source + zip ${build_dist_dir}/lambda/wait_provider.zip wait_provider.py cfnresponse.py + + header "[Pack] Orchestrator Lambdas" + + pushd "$source_dir"/Orchestrator + ls | while read file; do + if [ ! 
-d $file ]; then + zip "$build_dist_dir"/lambda/"$file".zip "$file" + fi + done + popd - mv "$template_dist_dir"/SolutionDeployStack.template "$template_dist_dir"/aws-sharr-deploy.template - mv "$template_dist_dir"/MemberStack.template "$template_dist_dir"/aws-sharr-member.template - mv "$template_dist_dir"/RunbookStack.template "$template_dist_dir"/aws-sharr-remediations.template - mv "$template_dist_dir"/OrchestratorLogStack.template "$template_dist_dir"/aws-sharr-orchestrator-log.template - mv "$template_dist_dir"/MemberRoleStack.template "$template_dist_dir"/aws-sharr-member-roles.template + header "[Create] Playbooks" - rm "$template_dist_dir"/*.nested.template + for playbook in $(ls "$source_dir"/playbooks); do + if [ $playbook == 'NEWPLAYBOOK' ] || [ $playbook == '.coverage' ] || [ $playbook == 'common' ]; then + continue + fi + echo Create $playbook playbook + pushd "$source_dir"/playbooks/"$playbook" + npx cdk synth + cd cdk.out + for template in $(ls *.template.json); do + cp "$template" "$template_dist_dir"/playbooks/${template%.json} + done + popd + done + + header "[Create] Deployment Templates" + + pushd "$source_dir"/solution_deploy + + npx cdk synth + cd cdk.out + for template in $(ls *.template.json); do + cp "$template" "$template_dist_dir"/${template%.json} + done + popd + + [ -e "$template_dir"/*.template ] && cp "$template_dir"/*.template "$template_dist_dir"/ + + mv "$template_dist_dir"/SolutionDeployStack.template "$template_dist_dir"/aws-sharr-deploy.template + mv "$template_dist_dir"/MemberStack.template "$template_dist_dir"/aws-sharr-member.template + mv "$template_dist_dir"/RunbookStack.template "$template_dist_dir"/aws-sharr-remediations.template + mv "$template_dist_dir"/OrchestratorLogStack.template "$template_dist_dir"/aws-sharr-orchestrator-log.template + mv "$template_dist_dir"/MemberRoleStack.template "$template_dist_dir"/aws-sharr-member-roles.template + + rm "$template_dist_dir"/*.nested.template } main "$@" diff --git 
a/deployment/requirements_dev.txt b/deployment/requirements_dev.txt new file mode 100644 index 00000000..08430108 --- /dev/null +++ b/deployment/requirements_dev.txt @@ -0,0 +1,14 @@ +aws-lambda-powertools +black +boto3-stubs-lite[cloudfront,cloudformation,cloudwatch,ec2,iam,s3,sns,ssm,sts] +flake8 +isort +moto[cloudfront,dynamodb,s3] +mypy +pytest +pytest-cov +pytest-env +pytest-mock +tox +types-urllib3 +urllib3<2 diff --git a/deployment/run-unit-tests.sh b/deployment/run-unit-tests.sh index a779431f..e4feda5d 100755 --- a/deployment/run-unit-tests.sh +++ b/deployment/run-unit-tests.sh @@ -1,6 +1,9 @@ #!/usr/bin/env bash # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 +[[ "$DEBUG" ]] && set -x +set -eo pipefail + maxrc=0 rc=0 export overrideWarningsEnabled=false @@ -15,7 +18,11 @@ source ./.venv/bin/activate python3 -m pip install -U pip setuptools echo 'Installing required Python testing modules' -pip install -r ./testing_requirements.txt +pip install -r ./requirements_dev.txt + +cd .. +pip install -e . +cd ./deployment # Get reference for all important folders template_dir="$PWD" @@ -27,22 +34,22 @@ coverage_report_path="${template_dir}/test/coverage-reports" mkdir -p ${coverage_report_path} run_pytest() { - cd ${1} - report_file="${coverage_report_path}/${2}.coverage.xml" - echo "coverage report path set to ${report_file}" + cd ${1} + report_file="${coverage_report_path}/${2}.coverage.xml" + echo "coverage report path set to ${report_file}" - # Use -vv for debugging - python3 -m pytest --cov --cov-report=term-missing --cov-report "xml:$report_file" - rc=$? + # Use -vv for debugging + python3 -m pytest --cov --cov-report=term-missing --cov-report "xml:$report_file" + rc=$? 
- if [ "$rc" -ne "0" ]; then - echo "** UNIT TESTS FAILED **" - else - echo "Unit Tests Successful" - fi - if [ "$rc" -gt "$maxrc" ]; then - maxrc=$rc - fi + if [ "$rc" -ne "0" ]; then + echo "** UNIT TESTS FAILED **" + else + echo "Unit Tests Successful" + fi + if [ "$rc" -gt "$maxrc" ]; then + maxrc=$rc + fi } if [[ -e './solution_env.sh' ]]; then @@ -75,18 +82,28 @@ else fi echo "------------------------------------------------------------------------------" -echo "[Test] CDK Unit Tests" +echo "[Lint] Code Style and Lint" echo "------------------------------------------------------------------------------" cd $source_dir +npx prettier --check '**/*.ts' +npx eslint --ext .ts --max-warnings=0 . +cd .. +tox -e format +tox -e lint + +echo "------------------------------------------------------------------------------" +echo "[Test] CDK Unit Tests" +echo "------------------------------------------------------------------------------" +cd "$source_dir" [[ $update == "true" ]] && { npm run test -- -u } || { npm run test rc=$? 
if [ "$rc" -ne "0" ]; then - echo "** UNIT TESTS FAILED **" + echo "** UNIT TESTS FAILED **" else - echo "Unit Tests Successful" + echo "Unit Tests Successful" fi if [ "$rc" -gt "$maxrc" ]; then maxrc=$rc @@ -106,7 +123,7 @@ run_pytest "${source_dir}/solution_deploy/source" "SolutionDeploy" echo "------------------------------------------------------------------------------" echo "[Test] Python Unit Tests - LambdaLayers" echo "------------------------------------------------------------------------------" -run_pytest "${source_dir}/LambdaLayers" "LambdaLayers" +run_pytest "${source_dir}/layer" "LambdaLayers" echo "------------------------------------------------------------------------------" echo "[Test] Python Scripts for Remediation Runbooks" @@ -123,7 +140,7 @@ echo "[Test] Python Scripts for Playbooks" echo "------------------------------------------------------------------------------" for playbook in `ls ${source_dir}/playbooks`; do if [ -d ${source_dir}/playbooks/${playbook}/ssmdocs/scripts/tests ]; then - run_pytest "${source_dir}/playbooks/${playbook}/ssmdocs/scripts" "Playbook${playbook}" + run_pytest "${source_dir}/playbooks/${playbook}/ssmdocs/scripts" "Playbook${playbook}" fi done @@ -138,9 +155,9 @@ sed -i -e "s|.*${temp_source_dir}|source|g" $coverage_report_pat echo "=========================================================================" if [ "$maxrc" -ne "0" ]; then - echo "** UNIT TESTS FAILED **" + echo "** UNIT TESTS FAILED **" else - echo "ALL UNIT TESTS PASSED" + echo "ALL UNIT TESTS PASSED" fi exit $maxrc diff --git a/deployment/solution_env.sh b/deployment/solution_env.sh index 385ef36c..1723d702 100755 --- a/deployment/solution_env.sh +++ b/deployment/solution_env.sh @@ -2,5 +2,5 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 export SOLUTION_ID='SO0111' -export SOLUTION_NAME='AWS Security Hub Automated Response & Remediation' -export SOLUTION_TRADEMARKEDNAME='aws-security-hub-automated-response-and-remediation' +export SOLUTION_NAME='Automated Security Response on AWS' +export SOLUTION_TRADEMARKEDNAME='automated-security-response-on-aws' diff --git a/deployment/testing_requirements.txt b/deployment/testing_requirements.txt deleted file mode 100644 index 532d6658..00000000 --- a/deployment/testing_requirements.txt +++ /dev/null @@ -1,8 +0,0 @@ -moto[s3] -aws-lambda-powertools -boto3-stubs-lite[s3] -pytest -pytest-cov -pytest-env -pytest-mock -urllib3<2 diff --git a/deployment/upload-s3-dist.sh b/deployment/upload-s3-dist.sh index dc50685e..4f12ebe7 100755 --- a/deployment/upload-s3-dist.sh +++ b/deployment/upload-s3-dist.sh @@ -104,6 +104,6 @@ echo "---" # read -p "Press [Enter] key to start upload to $region" aws s3 sync ./global-s3-assets s3://${bucket}-reference/$solution_name/$version/ -aws s3 sync ./regional-s3-assets s3://$bucket-${region}/$solution_name/$version/ +aws s3 sync ./regional-s3-assets s3://$bucket-${region}/$solution_name/$version/ --region ${region} echo "Completed uploading distribution. 
You may now install from the templates in ${bucket-reference}-reference/${solution_name}/${version}/" diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 00000000..cd378571 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,22 @@ +[mypy] +warn_unused_configs = True +warn_redundant_casts = True +warn_unused_ignores = True + +strict_equality = True +strict_concatenate = True + +check_untyped_defs = True + +disallow_subclassing_any = True +disallow_untyped_decorators = True +disallow_any_generics = True + +disallow_incomplete_defs = True + +no_implicit_reexport = True + +warn_return_any = True + +[mypy-moto] +ignore_missing_imports = True diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..6375ba28 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,10 @@ +[project] +name = "automated_security_response_on_aws" +version = "2.1.0" + +[tool.setuptools] +package-dir = {"" = "source"} +packages = ["layer"] + +[tool.setuptools.package-data] +"layer" = ["py.typed"] diff --git a/simtest/simdata/afsbp-cloudfront.1.json b/simtest/simdata/afsbp-cloudfront.1.json new file mode 100644 index 00000000..562aeb81 --- /dev/null +++ b/simtest/simdata/afsbp-cloudfront.1.json @@ -0,0 +1,76 @@ +{ + "SchemaVersion": "2018-10-08", + "Id": "arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0/CloudFront.1/finding/62f4c4cb-b17d-4575-b8e4-b1f2d8fc7350", + "ProductArn": "arn:aws:securityhub:us-east-1::product/aws/securityhub", + "ProductName": "Security Hub", + "CompanyName": "AWS", + "Region": "us-east-1", + "GeneratorId": "aws-foundational-security-best-practices/v/1.0.0/CloudFront.1", + "AwsAccountId": "111111111111", + "Types": [ + "Software and Configuration Checks/Industry and Regulatory Standards/AWS-Foundational-Security-Best-Practices" + ], + "FirstObservedAt": "2023-08-29T22:04:14.831Z", + "LastObservedAt": "2023-08-29T22:04:21.785Z", + "CreatedAt": "2023-08-29T22:04:14.831Z", + "UpdatedAt": 
"2023-08-29T22:04:14.831Z", + "Severity": { + "Product": 90, + "Label": "CRITICAL", + "Normalized": 90, + "Original": "CRITICAL" + }, + "Title": "CloudFront.1 CloudFront distributions should have a default root object configured", + "Description": "This control checks whether an Amazon CloudFront distribution is configured to return a specific object that is the default root object. The control fails if the CloudFront distribution does not have a default root object configured.", + "Remediation": { + "Recommendation": { + "Text": "For information on how to correct this issue, consult the AWS Security Hub controls documentation.", + "Url": "https://docs.aws.amazon.com/console/securityhub/CloudFront.1/remediation" + } + }, + "ProductFields": { + "StandardsArn": "arn:aws:securityhub:::standards/aws-foundational-security-best-practices/v/1.0.0", + "StandardsSubscriptionArn": "arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0", + "ControlId": "CloudFront.1", + "RecommendationUrl": "https://docs.aws.amazon.com/console/securityhub/CloudFront.1/remediation", + "RelatedAWSResources:0/name": "securityhub-cloudfront-default-root-object-configured-4276159b", + "RelatedAWSResources:0/type": "AWS::Config::ConfigRule", + "StandardsControlArn": "arn:aws:securityhub:us-east-1:111111111111:control/aws-foundational-security-best-practices/v/1.0.0/CloudFront.1", + "aws/securityhub/ProductName": "Security Hub", + "aws/securityhub/CompanyName": "AWS", + "Resources:0/Id": "arn:aws:cloudfront::111111111111:distribution/E202DN2YO141F9", + "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-1::product/aws/securityhub/arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0/CloudFront.1/finding/62f4c4cb-b17d-4575-b8e4-b1f2d8fc7350" + }, + "Resources": [ + { + "Type": "AwsCloudFrontDistribution", + "Id": "arn:aws:cloudfront::111111111111:distribution/E202DN2YO141F9", + "Partition": 
"aws", + "Region": "us-east-1" + } + ], + "Compliance": { + "Status": "FAILED", + "SecurityControlId": "CloudFront.1", + "AssociatedStandards": [ + { + "StandardsId": "standards/aws-foundational-security-best-practices/v/1.0.0" + } + ] + }, + "WorkflowState": "NEW", + "Workflow": { + "Status": "NEW" + }, + "RecordState": "ACTIVE", + "FindingProviderFields": { + "Severity": { + "Label": "CRITICAL", + "Original": "CRITICAL" + }, + "Types": [ + "Software and Configuration Checks/Industry and Regulatory Standards/AWS-Foundational-Security-Best-Practices" + ] + }, + "ProcessedAt": "2023-08-29T22:04:26.320Z" + } \ No newline at end of file diff --git a/simtest/simdata/afsbp-codebuild.5.json b/simtest/simdata/afsbp-codebuild.5.json new file mode 100644 index 00000000..7fa01cc5 --- /dev/null +++ b/simtest/simdata/afsbp-codebuild.5.json @@ -0,0 +1,76 @@ +{ + "SchemaVersion": "2018-10-08", + "Id": "arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0/CodeBuild.5/finding/db862c92-eeb6-4e84-978d-627b74c055fc", + "ProductArn": "arn:aws:securityhub:us-east-1::product/aws/securityhub", + "ProductName": "Security Hub", + "CompanyName": "AWS", + "Region": "us-east-1", + "GeneratorId": "aws-foundational-security-best-practices/v/1.0.0/CodeBuild.5", + "AwsAccountId": "111111111111", + "Types": [ + "Software and Configuration Checks/Industry and Regulatory Standards/AWS-Foundational-Security-Best-Practices" + ], + "FirstObservedAt": "2023-08-24T23:18:59.036Z", + "LastObservedAt": "2023-08-25T16:42:22.395Z", + "CreatedAt": "2023-08-24T23:18:59.036Z", + "UpdatedAt": "2023-08-25T16:42:11.112Z", + "Severity": { + "Product": 70, + "Label": "HIGH", + "Normalized": 70, + "Original": "HIGH" + }, + "Title": "CodeBuild.5 CodeBuild project environments should not have privileged mode enabled", + "Description": "This control checks if an AWS CodeBuild project environment has privileged mode enabled.", + "Remediation": { + "Recommendation": { + 
"Text": "For information on how to correct this issue, consult the AWS Security Hub controls documentation.", + "Url": "https://docs.aws.amazon.com/console/securityhub/CodeBuild.5/remediation" + } + }, + "ProductFields": { + "StandardsArn": "arn:aws:securityhub:::standards/aws-foundational-security-best-practices/v/1.0.0", + "StandardsSubscriptionArn": "arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0", + "ControlId": "CodeBuild.5", + "RecommendationUrl": "https://docs.aws.amazon.com/console/securityhub/CodeBuild.5/remediation", + "RelatedAWSResources:0/name": "securityhub-codebuild-project-environment-privileged-check-974aa0aa", + "RelatedAWSResources:0/type": "AWS::Config::ConfigRule", + "StandardsControlArn": "arn:aws:securityhub:us-east-1:111111111111:control/aws-foundational-security-best-practices/v/1.0.0/CodeBuild.5", + "aws/securityhub/ProductName": "Security Hub", + "aws/securityhub/CompanyName": "AWS", + "Resources:0/Id": "arn:aws:codebuild:us-east-1:111111111111:project/test", + "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-1::product/aws/securityhub/arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0/CodeBuild.5/finding/db862c92-eeb6-4e84-978d-627b74c055fc" + }, + "Resources": [ + { + "Type": "AwsCodeBuildProject", + "Id": "arn:aws:codebuild:us-east-1:111111111111:project/test", + "Partition": "aws", + "Region": "us-east-1" + } + ], + "Compliance": { + "Status": "FAILED", + "SecurityControlId": "CodeBuild.5", + "AssociatedStandards": [ + { + "StandardsId": "standards/aws-foundational-security-best-practices/v/1.0.0" + } + ] + }, + "WorkflowState": "NEW", + "Workflow": { + "Status": "NEW" + }, + "RecordState": "ACTIVE", + "FindingProviderFields": { + "Severity": { + "Label": "HIGH", + "Original": "HIGH" + }, + "Types": [ + "Software and Configuration Checks/Industry and Regulatory Standards/AWS-Foundational-Security-Best-Practices" + ] 
+ }, + "ProcessedAt": "2023-08-25T16:42:26.635Z" + } \ No newline at end of file diff --git a/simtest/simdata/afsbp-ec2.8.json b/simtest/simdata/afsbp-ec2.8.json new file mode 100644 index 00000000..5c06cdd3 --- /dev/null +++ b/simtest/simdata/afsbp-ec2.8.json @@ -0,0 +1,98 @@ +{ + "SchemaVersion": "2018-10-08", + "Id": "arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0/EC2.8/finding/7ff2dedd-7f57-4195-a7c9-b2d5dacd7de6", + "ProductArn": "arn:aws:securityhub:us-east-1::product/aws/securityhub", + "ProductName": "Security Hub", + "CompanyName": "AWS", + "Region": "us-east-1", + "GeneratorId": "aws-foundational-security-best-practices/v/1.0.0/EC2.8", + "AwsAccountId": "111111111111", + "Types": [ + "Software and Configuration Checks/Industry and Regulatory Standards/AWS-Foundational-Security-Best-Practices" + ], + "FirstObservedAt": "2023-08-21T23:04:49.836Z", + "LastObservedAt": "2023-08-21T23:04:54.133Z", + "CreatedAt": "2023-08-21T23:04:49.836Z", + "UpdatedAt": "2023-08-21T23:04:49.836Z", + "Severity": { + "Product": 70, + "Label": "HIGH", + "Normalized": 70, + "Original": "HIGH" + }, + "Title": "EC2.8 EC2 instances should use Instance Metadata Service Version 2 (IMDSv2)", + "Description": "This control checks whether your Amazon Elastic Compute Cloud (Amazon EC2) instance metadata version is configured with Instance Metadata Service Version 2 (IMDSv2). The control passes if HttpTokens is set to required for IMDSv2. 
The control fails if HttpTokens is set to optional.", + "Remediation": { + "Recommendation": { + "Text": "For information on how to correct this issue, consult the AWS Security Hub controls documentation.", + "Url": "https://docs.aws.amazon.com/console/securityhub/EC2.8/remediation" + } + }, + "ProductFields": { + "StandardsArn": "arn:aws:securityhub:::standards/aws-foundational-security-best-practices/v/1.0.0", + "StandardsSubscriptionArn": "arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0", + "ControlId": "EC2.8", + "RecommendationUrl": "https://docs.aws.amazon.com/console/securityhub/EC2.8/remediation", + "RelatedAWSResources:0/name": "securityhub-ec2-imdsv2-check-fea82885", + "RelatedAWSResources:0/type": "AWS::Config::ConfigRule", + "StandardsControlArn": "arn:aws:securityhub:us-east-1:111111111111:control/aws-foundational-security-best-practices/v/1.0.0/EC2.8", + "aws/securityhub/ProductName": "Security Hub", + "aws/securityhub/CompanyName": "AWS", + "Resources:0/Id": "arn:aws:ec2:us-east-1:111111111111:instance/i-077c4d5f32561ac45", + "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-1::product/aws/securityhub/arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0/EC2.8/finding/7ff2dedd-7f57-4195-a7c9-b2d5dacd7de6" + }, + "Resources": [ + { + "Type": "AwsEc2Instance", + "Id": "arn:aws:ec2:us-east-1:111111111111:instance/i-077c4d5f32561ac45", + "Partition": "aws", + "Region": "us-east-1", + "Details": { + "AwsEc2Instance": { + "ImageId": "ami-08a52ddb321b32a8c", + "VpcId": "vpc-0771fca5bdb3f07f5", + "SubnetId": "subnet-016e21c9895ea4ded", + "LaunchedAt": "2023-08-21T23:02:49.000Z", + "NetworkInterfaces": [ + { + "NetworkInterfaceId": "eni-0b0adfc890534c925" + } + ], + "VirtualizationType": "hvm", + "MetadataOptions": { + "HttpEndpoint": "enabled", + "HttpPutResponseHopLimit": 2, + "HttpTokens": "optional" + }, + "Monitoring": { + "State": 
"disabled" + } + } + } + } + ], + "Compliance": { + "Status": "FAILED", + "SecurityControlId": "EC2.8", + "AssociatedStandards": [ + { + "StandardsId": "standards/aws-foundational-security-best-practices/v/1.0.0" + } + ] + }, + "WorkflowState": "NEW", + "Workflow": { + "Status": "NEW" + }, + "RecordState": "ACTIVE", + "FindingProviderFields": { + "Severity": { + "Label": "HIGH", + "Original": "HIGH" + }, + "Types": [ + "Software and Configuration Checks/Industry and Regulatory Standards/AWS-Foundational-Security-Best-Practices" + ] + }, + "ProcessedAt": "2023-08-21T23:04:56.596Z" + } \ No newline at end of file diff --git a/source/LambdaLayers/test/__init__.py b/simtest/simtest/__init__.py similarity index 100% rename from source/LambdaLayers/test/__init__.py rename to simtest/simtest/__init__.py diff --git a/simtest/simtest/boto_session.py b/simtest/simtest/boto_session.py index a0625d5b..81b6b5cf 100644 --- a/simtest/simtest/boto_session.py +++ b/simtest/simtest/boto_session.py @@ -5,37 +5,42 @@ _session = None + def get_session(): if not _session: - raise Exception('Session has not been initialized') + raise Exception("Session has not been initialized") return _session + def create_session(profile, region): global _session _session = BotoSession(profile, region) return _session + class BotoSession: def __init__(self, profile, region): self._config = botocore.config.Config( - region_name = region, - retries = {'max_attempts': 10}) - self._session = boto3.session.Session(profile_name = profile) + region_name=region, retries={"max_attempts": 10} + ) + self._session = boto3.session.Session(profile_name=profile) self._partition = None self._account = None def client(self, name, **kwargs): - return self._session.client(name, config = self._config, **kwargs) + return self._session.client(name, config=self._config, **kwargs) def resource(self, name, **kwargs): - return self._session.resource(name, config = self._config, **kwargs) + return self._session.resource(name, 
config=self._config, **kwargs) def get_partition(self): if not self._partition: - self._partition = self.client('sts').get_caller_identity()['Arn'].split(':')[1] + self._partition = ( + self.client("sts").get_caller_identity()["Arn"].split(":")[1] + ) return self._partition def get_account(self): if not self._account: - self._account = self.client('sts').get_caller_identity()['Account'] + self._account = self.client("sts").get_caller_identity()["Account"] return self._account diff --git a/simtest/simtest/controls.py b/simtest/simtest/controls.py index 5fc9c4c2..aed30150 100644 --- a/simtest/simtest/controls.py +++ b/simtest/simtest/controls.py @@ -1,36 +1,67 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -from simtest.remediation.autoscaling import * -from simtest.remediation.aws_lambda import * -from simtest.remediation.cloudtrail import * -from simtest.remediation.cloudwatch import * -from simtest.remediation.config import * -from simtest.remediation.ec2 import * -from simtest.remediation.guardduty import * -from simtest.remediation.iam import * -from simtest.remediation.kms import * -from simtest.remediation.rds import * -from simtest.remediation.s3 import * -from simtest.remediation.vpc import * +from simtest.remediation.autoscaling import run_autoscaling_1 +from simtest.remediation.aws_lambda import run_make_lambda_private +from simtest.remediation.cloudtrail import ( + run_create_cloudtrail_multi_region_trail, + run_create_ct_access_logging, + run_create_multi_region_cloudtrail, + run_enable_cloudtrail_logfile_validation, + run_enable_ct_encryption, + run_log_cloudtrail_to_cloudwatch, + run_make_cloudtrail_s3_bucket_private, +) +from simtest.remediation.cloudwatch import run_log_and_filter +from simtest.remediation.config import run_setup_config +from simtest.remediation.ec2 import ( + run_close_default_sg, + run_disable_public_access_for_security_group, + run_enable_ebs_encryption_by_default, + 
run_remove_public_ec2_snaps, + run_remove_vpc_default_security_group_rules, +) +from simtest.remediation.guardduty import run_guardduty_1 +from simtest.remediation.iam import ( + run_remove_old_credentials, + run_revoke_unrotated_keys, + run_set_password_policy, +) +from simtest.remediation.kms import run_setup_key_rotation +from simtest.remediation.rds import ( + run_enable_enhanced_monitoring_on_rds_instance, + run_enable_rds_cluster_deletion_protection, + run_make_rds_snapshot_private, +) +from simtest.remediation.s3 import ( + run_s3_block_public_access, + run_s3_block_public_bucket_access, +) +from simtest.remediation.vpc import run_enable_vpc_flow_logs # CIS 1.3 - 1.4 # [CIS.1.3] Ensure credentials unused for 90 days or greater are disabled # [CIS.1.4] Ensure access keys are rotated every 90 days or less + def setup_cis13(account, region): - run_remove_old_credentials('cis13', account, region) + run_remove_old_credentials("cis13", account, region) + def setup_afsbp_iam_8(account, region): - run_remove_old_credentials('afsbp-iam.8', account, region) + run_remove_old_credentials("afsbp-iam.8", account, region) + def setup_pci_iam_7(account, region): - run_remove_old_credentials('pci-iam.7', account, region) + run_remove_old_credentials("pci-iam.7", account, region) + def setup_cis14(account, region): - run_revoke_unrotated_keys('cis14', account, region) + run_revoke_unrotated_keys("cis14", account, region) + def setup_afsbp_iam_3(account, region): - run_revoke_unrotated_keys('afsbp-iam.3', account, region) + run_revoke_unrotated_keys("afsbp-iam.3", account, region) + # CIS 1.5 - 1.11 # [CIS.1.5] Ensure IAM password policy requires at least one uppercase letter @@ -41,33 +72,41 @@ def setup_afsbp_iam_3(account, region): # [CIS.1.10] Ensure IAM password policy prevents password reuse # [CIS.1.11] Ensure IAM password policy expires passwords within 90 days or less def setup_cis15(account, region): - run_set_password_policy('cis15111', account, region) + 
run_set_password_policy("cis15111", account, region) + def setup_afsbp_iam_7(account, region): - run_set_password_policy('afsbp-iam.7', account, region) + run_set_password_policy("afsbp-iam.7", account, region) + def setup_pci_iam_8(account, region): - run_set_password_policy('pci-iam.8', account, region) + run_set_password_policy("pci-iam.8", account, region) + # CIS 2.1 # def setup_cis21(account, region): - run_create_multi_region_cloudtrail('cis21', account, region) + run_create_multi_region_cloudtrail("cis21", account, region) + def setup_afsbp_cloudtrail_1(account, region): - run_create_multi_region_cloudtrail('afsbp-cloudtrail.1', account, region) + run_create_multi_region_cloudtrail("afsbp-cloudtrail.1", account, region) + def setup_pci_cloudtrail_2(account, region): - run_create_multi_region_cloudtrail('pci-cloudtrail.2', account, region) + run_create_multi_region_cloudtrail("pci-cloudtrail.2", account, region) + # CIS 2.2 # [CIS.2.2] Ensure CloudTrail log file validation is enabled # def setup_cis22(account, region): - run_enable_cloudtrail_logfile_validation('cis22', account, region) + run_enable_cloudtrail_logfile_validation("cis22", account, region) + def setup_pci_cloudtrail_3(account, region): - run_enable_cloudtrail_logfile_validation('pci-cloudtrail.3', account, region) + run_enable_cloudtrail_logfile_validation("pci-cloudtrail.3", account, region) + # CIS 2.3 # [CIS.2.3] Ensure the S3 bucket used to store CloudTrail logs is not publicly accessible @@ -75,211 +114,249 @@ def setup_pci_cloudtrail_3(account, region): # Setting a bucket up for public access will generate a sev 2 TT and escalation to your manager. # Let's not go there. 
def setup_cis23(account, region): - run_make_cloudtrail_s3_bucket_private('cis23', account, region) + run_make_cloudtrail_s3_bucket_private("cis23", account, region) + # CIS 2.4 # [CIS.2.4] Ensure CloudTrail trails are integrated with CloudWatch Logs def setup_cis24(account, region): - run_log_cloudtrail_to_cloudwatch('cis24', account, region) + run_log_cloudtrail_to_cloudwatch("cis24", account, region) + def setup_pci_cloudtrail_4(account, region): - run_log_cloudtrail_to_cloudwatch('pci-cloudtrail.4', account, region) + run_log_cloudtrail_to_cloudwatch("pci-cloudtrail.4", account, region) + # CIS 2.6 # [CIS.2.6] Ensure S3 bucket access logging is enabled on the CloudTrail S3 bucket def setup_cis26(account, region): - run_create_ct_access_logging('cis26', account, region) + run_create_ct_access_logging("cis26", account, region) + # CIS 2.8 # [CIS.2.8] Ensure rotation for customer created CMKs is enabled def setup_cis28(account, region): - run_setup_key_rotation('cis28', account, region) + run_setup_key_rotation("cis28", account, region) + # CIS 2.9 # [CIS.2.9] Ensure VPC flow logging is enabled in all VPCs def setup_cis29(account, region): - run_enable_vpc_flow_logs('cis29', account, region) + run_enable_vpc_flow_logs("cis29", account, region) + def setup_afsbp_ec2_6(account, region): - run_enable_vpc_flow_logs('afsbp-ec2.6', account, region) + run_enable_vpc_flow_logs("afsbp-ec2.6", account, region) + def setup_pci_ec2_6(account, region): - run_enable_vpc_flow_logs('pci-ec2.6', account, region) + run_enable_vpc_flow_logs("pci-ec2.6", account, region) + # CIS 3.1-3.14 def setup_cis31314(account, region): - run_log_and_filter('cis32', account, region) + run_log_and_filter("cis32", account, region) + def setup_pci_cw_1(account, region): - run_log_and_filter('pci-cw.1', account, region) + run_log_and_filter("pci-cw.1", account, region) + # CIS 4.1 - 4.2 # [CIS.4.1] Ensure no security groups allow ingress from 0.0.0.0/0 to port 22 # [CIS.4.2] Ensure no security groups 
allow ingress from 0.0.0.0/0 to port 3389 def setup_cis4142(account, region): - run_disable_public_access_for_security_group('cis4142', account, region) + run_disable_public_access_for_security_group("cis4142", account, region) + # CIS 4.3 # [CIS.4.3] Ensure the default security group of every VPC restricts all traffic def setup_cis43(account, region): - run_remove_vpc_default_security_group_rules('cis43', account, region) + run_remove_vpc_default_security_group_rules("cis43", account, region) + # AFSBP AutoScaling.1 # [AFSBP.AutoScaling.1] Auto scaling groups associated with a load balancer should use load balancer health checks def setup_afsbp_autoscaling_1(account, region): - run_autoscaling_1('afsbp-autoscaling.1', account, region) + run_autoscaling_1("afsbp-autoscaling.1", account, region) + def setup_pci_autoscaling_1(account, region): - run_autoscaling_1('pci-autoscaling.1', account, region) + run_autoscaling_1("pci-autoscaling.1", account, region) + # AFSBP CloudTrail.1 # [AFSBP.CloudTrail.1] CloudTrail should be enabled and configured with at least one multi-region trail def setup_afsbp_cloudtrail_1x(account, region): - run_create_cloudtrail_multi_region_trail('afsbp-cloudtrail.1', account, region) + run_create_cloudtrail_multi_region_trail("afsbp-cloudtrail.1", account, region) + # AFSBP CloudTrail.2 # [AFSBP.CloudTrail.2] CloudTrail should have encryption at-rest enabled + def setup_cis27(account, region): - run_enable_ct_encryption('cis27', account, region) + run_enable_ct_encryption("cis27", account, region) + def setup_afsbp_cloudtrail_2(account, region): - run_enable_ct_encryption('afsbp-cloudtrail.2', account, region) + run_enable_ct_encryption("afsbp-cloudtrail.2", account, region) + def setup_pci_cloudtrail_1(account, region): - run_enable_ct_encryption('pci-cloudtrail.1', account, region) + run_enable_ct_encryption("pci-cloudtrail.1", account, region) + # AFSBP Config.1 # [AFSBP.Config.1] AWS Config should be enabled def setup_cis25(account, 
region): - run_setup_config('cis25', account, region) + run_setup_config("cis25", account, region) + def setup_afsbp_config_1(account, region): - run_setup_config('afsbp-config.1', account, region) + run_setup_config("afsbp-config.1", account, region) + def setup_pci_config_1(account, region): - run_setup_config('pci-config.1', account, region) + run_setup_config("pci-config.1", account, region) + # AFSBP EC2.1 # [AFSBP.EC2.1] EBS snapshots should not be public def setup_afsbp_ec2_1(account, region): - run_remove_public_ec2_snaps('afsbp-ec2.1', account, region) + run_remove_public_ec2_snaps("afsbp-ec2.1", account, region) + def setup_pci_ec2_1(account, region): - run_remove_public_ec2_snaps('pci-ec2.1', account, region) + run_remove_public_ec2_snaps("pci-ec2.1", account, region) + # AFSBP EC2.2 # [AFSBP.EC2.2] The VPC default security group should not allow inbound and outbound traffic def setup_afsbp_ec2_2(account, region): - run_close_default_sg('afsbp-ec2.2', account, region) + run_close_default_sg("afsbp-ec2.2", account, region) + def setup_pci_ec2_2(account, region): - run_close_default_sg('pci-ec2.2', account, region) + run_close_default_sg("pci-ec2.2", account, region) + # AFSBP EC2.7 # [AFSBP.EC2.7] The VPC default security group should not allow inbound and outbound traffic def setup_afsbp_ec2_7(account, region): - run_enable_ebs_encryption_by_default('afsbp-ec2.7', account, region) + run_enable_ebs_encryption_by_default("afsbp-ec2.7", account, region) + # AFSBP GuardDuty.1 # [AFSBP.GuardDuty.1] GuardDuty should be enabled def setup_afsbp_guardduty_1(account, region): - run_guardduty_1('afsbp-guardduty.1', account, region) + run_guardduty_1("afsbp-guardduty.1", account, region) + # # AFSBP Lambda.1 # [AFSBP.Lambda.1] Lambda function policies should prohibit public access def setup_pci_lambda_1(account, region): - run_make_lambda_private('pci-lambda.1', account, region) + run_make_lambda_private("pci-lambda.1", account, region) + def 
setup_afsbp_lambda_1(account, region): - run_make_lambda_private('afsbp-lambda.1', account, region) + run_make_lambda_private("afsbp-lambda.1", account, region) + # AFSBP RDS.1 # [AFSBP.RDS.1] RDS snapshot should be private def setup_afsbp_rds_1(account, region): - run_make_rds_snapshot_private('afsbp-rds.1', account, region) + run_make_rds_snapshot_private("afsbp-rds.1", account, region) + def setup_pci_rds_1(account, region): - run_make_rds_snapshot_private('pci-rds.1', account, region) + run_make_rds_snapshot_private("pci-rds.1", account, region) + # AFSBP RDS.6 # [AFSBP.RDS.6] Enhanced monitoring should be configured for RDS DB instance def setup_afsbp_rds_6(account, region): - run_enable_enhanced_monitoring_on_rds_instance('afsbp-rds.6', account, region) + run_enable_enhanced_monitoring_on_rds_instance("afsbp-rds.6", account, region) + # AFSBP RDS.7 # [AFSBP.RDS.7] RDS clusters should have deletion protection enabled def setup_afsbp_rds_7(account, region): - run_enable_rds_cluster_deletion_protection('afsbp-rds.7', account, region) + run_enable_rds_cluster_deletion_protection("afsbp-rds.7", account, region) + # AFSBP S3.1 / PCI S3.6 def setup_afsbp_s3_1(account, region): - run_s3_block_public_access('afsbp-s3.1', account, region) + run_s3_block_public_access("afsbp-s3.1", account, region) + def setup_pci_s3_6(account, region): - run_s3_block_public_access('pci-s3.6', account, region) + run_s3_block_public_access("pci-s3.6", account, region) + # AFSBP S3.2-S3.3 / PCI S3.1-S3.2 def setup_afsbp_s3_2(account, region): - run_s3_block_public_bucket_access('afsbp-s3.2', account, region) + run_s3_block_public_bucket_access("afsbp-s3.2", account, region) + def setup_pci_s3_2(account, region): - run_s3_block_public_bucket_access('pci-s3.2', account, region) + run_s3_block_public_bucket_access("pci-s3.2", account, region) + testIdByStandard = { - 'afsbp': { - 'autoscaling.1': setup_afsbp_autoscaling_1, - 'cloudtrail.1': setup_afsbp_cloudtrail_1, - 'cloudtrail.2': 
setup_afsbp_cloudtrail_2, - 'config.1': setup_afsbp_config_1, - 'ec2.1': setup_afsbp_ec2_1, - 'ec2.2': setup_afsbp_ec2_2, - 'ec2.6': setup_afsbp_ec2_6, - 'ec2.7': setup_afsbp_ec2_7, - 'iam.3': setup_afsbp_iam_3, - 'iam.7': setup_afsbp_iam_7, - 'iam.8': setup_afsbp_iam_8, - 'lambda.1': setup_afsbp_lambda_1, - 'rds.1': setup_afsbp_rds_1, - 'rds.6': setup_afsbp_rds_6, - 'rds.7': setup_afsbp_rds_7, - 's3.1': setup_afsbp_s3_1, - 's3.2': setup_afsbp_s3_2 + "afsbp": { + "autoscaling.1": setup_afsbp_autoscaling_1, + "cloudtrail.1": setup_afsbp_cloudtrail_1, + "cloudtrail.2": setup_afsbp_cloudtrail_2, + "config.1": setup_afsbp_config_1, + "ec2.1": setup_afsbp_ec2_1, + "ec2.2": setup_afsbp_ec2_2, + "ec2.6": setup_afsbp_ec2_6, + "ec2.7": setup_afsbp_ec2_7, + "iam.3": setup_afsbp_iam_3, + "iam.7": setup_afsbp_iam_7, + "iam.8": setup_afsbp_iam_8, + "lambda.1": setup_afsbp_lambda_1, + "rds.1": setup_afsbp_rds_1, + "rds.6": setup_afsbp_rds_6, + "rds.7": setup_afsbp_rds_7, + "s3.1": setup_afsbp_s3_1, + "s3.2": setup_afsbp_s3_2, + }, + "cis": { + "1.3": setup_cis13, + "1.4": setup_cis14, + "1.5": setup_cis15, + "2.1": setup_cis21, + "2.2": setup_cis22, + "2.3": setup_cis23, + "2.4": setup_cis24, + "2.5": setup_cis25, + "2.6": setup_cis26, + "2.7": setup_cis27, + "2.8": setup_cis28, + "2.9": setup_cis29, + "3.1": setup_cis31314, + "4.1": setup_cis4142, + "4.3": setup_cis43, }, - 'cis': { - '1.3': setup_cis13, - '1.4': setup_cis14, - '1.5': setup_cis15, - '2.1': setup_cis21, - '2.2': setup_cis22, - '2.3': setup_cis23, - '2.4': setup_cis24, - '2.5': setup_cis25, - '2.6': setup_cis26, - '2.7': setup_cis27, - '2.8': setup_cis28, - '2.9': setup_cis29, - '3.1': setup_cis31314, - '4.1': setup_cis4142, - '4.3': setup_cis43 + "pci": { + "autoscaling.1": setup_pci_autoscaling_1, + "cloudtrail.1": setup_pci_cloudtrail_1, + "cloudtrail.2": setup_pci_cloudtrail_2, + "cloudtrail.3": setup_pci_cloudtrail_3, + "cloudtrail.4": setup_pci_cloudtrail_4, + "config.1": setup_pci_config_1, + "cw.1": 
setup_pci_cw_1, + "ec2.1": setup_pci_ec2_1, + "ec2.2": setup_pci_ec2_2, + "ec2.6": setup_pci_ec2_6, + "iam.7": setup_pci_iam_7, + "iam.8": setup_pci_iam_8, + "lambda.1": setup_pci_lambda_1, + "rds.1": setup_pci_rds_1, + "s3.2": setup_pci_s3_2, + "s3.6": setup_pci_s3_6, }, - 'pci': { - 'autoscaling.1': setup_pci_autoscaling_1, - 'cloudtrail.1': setup_pci_cloudtrail_1, - 'cloudtrail.2': setup_pci_cloudtrail_2, - 'cloudtrail.3': setup_pci_cloudtrail_3, - 'cloudtrail.4': setup_pci_cloudtrail_4, - 'config.1': setup_pci_config_1, - 'cw.1': setup_pci_cw_1, - 'ec2.1': setup_pci_ec2_1, - 'ec2.2': setup_pci_ec2_2, - 'ec2.6': setup_pci_ec2_6, - 'iam.7': setup_pci_iam_7, - 'iam.8': setup_pci_iam_8, - 'lambda.1': setup_pci_lambda_1, - 'rds.1': setup_pci_rds_1, - 's3.2': setup_pci_s3_2, - 's3.6': setup_pci_s3_6 - } } diff --git a/simtest/simtest/orchestrator.py b/simtest/simtest/orchestrator.py index 8e308235..1f0c5a0c 100644 --- a/simtest/simtest/orchestrator.py +++ b/simtest/simtest/orchestrator.py @@ -1,34 +1,40 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -from simtest.boto_session import get_session import json +from simtest.boto_session import get_session + _orchestrator = None + def get_orchestrator(): if not _orchestrator: - raise Exception('Orchestrator has not been initialized') + raise Exception("Orchestrator has not been initialized") return _orchestrator + def create_orchestrator(region): global _orchestrator _orchestrator = Orchestrator(region) return _orchestrator + class Orchestrator: def __init__(self, region): self._session = get_session() self._region = region - self._arn = f'arn:{self._session.get_partition()}:states:{self._region}:{self._session.get_account()}:stateMachine:SO0111-SHARR-Orchestrator' + self._arn = f"arn:{self._session.get_partition()}:states:{self._region}:{self._session.get_account()}:stateMachine:SO0111-SHARR-Orchestrator" def invoke(self, payload): try: - sfn = self._session.client('stepfunctions', region_name = self._region) - print(f'Invoking Orchestrator in {self._region}') - sfn.start_execution(stateMachineArn = self._arn, input = json.dumps(payload)) + sfn = self._session.client("stepfunctions", region_name=self._region) + print(f"Invoking Orchestrator in {self._region}") + sfn.start_execution(stateMachineArn=self._arn, input=json.dumps(payload)) except Exception as e: print(e) - print(f'start_execution for Orchestrator step function failed in {self._region}') + print( + f"start_execution for Orchestrator step function failed in {self._region}" + ) def get_region(self): return self._region diff --git a/simtest/simtest/remediation/autoscaling.py b/simtest/simtest/remediation/autoscaling.py index 71ed3b96..b3a6d77d 100644 --- a/simtest/simtest/remediation/autoscaling.py +++ b/simtest/simtest/remediation/autoscaling.py @@ -2,20 +2,23 @@ # SPDX-License-Identifier: Apache-2.0 from simtest.remediation_test import RemediationTest + def run_autoscaling_1(remediation, account, region): - print('This test enables ELB health checks on an autoscaling 
group.\n') + print("This test enables ELB health checks on an autoscaling group.\n") - print('Manual Setup') - print('============\n') - print('1) Select an Autoscaling group attached to an ELB to test with.') - print('2) Disable ELB health checks') - asg_name = input('\nAutoscaling group name? ') + print("Manual Setup") + print("============\n") + print("1) Select an Autoscaling group attached to an ELB to test with.") + print("2) Disable ELB health checks") + asg_name = input("\nAutoscaling group name? ") test = RemediationTest(remediation, account, wrap_it_in_findings=True) - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = f'arn:aws:autoscaling:{region}:{account}:autoScalingGroup:785df3481e1-cd66-435d-96de-d6ed5416defd:autoScalingGroupName/{asg_name}' + test.test_json["detail"]["findings"][0]["Resources"][0][ + "Id" + ] = f"arn:aws:autoscaling:{region}:{account}:autoScalingGroup:785df3481e1-cd66-435d-96de-d6ed5416defd:autoScalingGroupName/{asg_name}" test.run() - print('\nVERIFICATION\n============\n') - print(f'1) Verify that ELB health check is enabled on the autoscaling group.') + print("\nVERIFICATION\n============\n") + print("1) Verify that ELB health check is enabled on the autoscaling group.") diff --git a/simtest/simtest/remediation/aws_lambda.py b/simtest/simtest/remediation/aws_lambda.py index 43181c97..29bdd34b 100644 --- a/simtest/simtest/remediation/aws_lambda.py +++ b/simtest/simtest/remediation/aws_lambda.py @@ -1,64 +1,79 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -from simtest.remediation_test import RemediationTest -from simtest.boto_session import get_session from botocore.exceptions import ClientError +from simtest.boto_session import get_session +from simtest.remediation_test import RemediationTest + + def run_make_lambda_private(remediation, account, region): aws = get_session() - print('This test removes public permissions to Lambdas.\n') + print("This test removes public permissions to Lambdas.\n") - print('WARNING: This test may result in a Sev 2!\n') - input('Press ENTER to confirm that you read the warning.') + print("WARNING: This test may result in a Sev 2!\n") + input("Press ENTER to confirm that you read the warning.") lambda_name = None if account == aws.get_account(): - print('Automatic Setup\n') - print('===============\n') - print('1) Create a lambda in the test account using the "Hello, world!" example code.') - lambda_name = input('Lambda function name: ') + print("Automatic Setup\n") + print("===============\n") + print( + '1) Create a lambda in the test account using the "Hello, world!" example code.' + ) + lambda_name = input("Lambda function name: ") make_lambda_public(lambda_name) else: - print('Manual Setup\n') - print('===============\n') - print('1) Create a lambda in the test account using the "Hello, world!" example code.') - print('2) Make the lambda public by running the following CLI in the target account:') - print(' aws lambda add-permission --function-name --statement-id SHARRTest --action lambda:InvokeFunction --principal \'*\'') - lambda_name = input('Enter the name of the test lambda: ') + print("Manual Setup\n") + print("===============\n") + print( + '1) Create a lambda in the test account using the "Hello, world!" example code.' 
+ ) + print( + "2) Make the lambda public by running the following CLI in the target account:" + ) + print( + " aws lambda add-permission --function-name --statement-id SHARRTest --action lambda:InvokeFunction --principal '*'" + ) + lambda_name = input("Enter the name of the test lambda: ") test = RemediationTest(remediation, account, wrap_it_in_findings=True) - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = 'AWS::::Account:' + account - test.test_json['detail']['findings'][0]['Resources'][0]['Details']['AwsLambdaFunction']['FunctionName'] = lambda_name + test.test_json["detail"]["findings"][0]["Resources"][0]["Id"] = ( + "AWS::::Account:" + account + ) + test.test_json["detail"]["findings"][0]["Resources"][0]["Details"][ + "AwsLambdaFunction" + ]["FunctionName"] = lambda_name test.run() - print('\nVERIFICATION\n============\n') - print(f'1) {lambda_name} is no longer public') + print("\nVERIFICATION\n============\n") + print(f"1) {lambda_name} is no longer public") + def make_lambda_public(functionname): """ This will result in a sev 2, so know what you are doing! 
""" aws = get_session() - lmb = aws.client('lambda') + lmb = aws.client("lambda") try: lmb.add_permission( FunctionName=functionname, - StatementId='SHARRTest', - Action='lambda:InvokeFunction', - Principal='*' - ) + StatementId="SHARRTest", + Action="lambda:InvokeFunction", + Principal="*", + ) except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] # stream did exist but need new token, get it from exception data if exception_type == "ResourceNotFoundException": - print(f'{functionname} does not exist.') + print(f"{functionname} does not exist.") exit() else: - print(f'Unhandled client error {exception_type}') + print(f"Unhandled client error {exception_type}") raise except Exception as e: print(e) - print('Something went wrong') + print("Something went wrong") raise diff --git a/simtest/simtest/remediation/cloudtrail.py b/simtest/simtest/remediation/cloudtrail.py index 2ae1122a..346510a6 100644 --- a/simtest/simtest/remediation/cloudtrail.py +++ b/simtest/simtest/remediation/cloudtrail.py @@ -1,240 +1,294 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -from simtest.remediation_test import RemediationTest, ControlTest -from simtest.boto_session import get_session from botocore.exceptions import ClientError +from simtest.boto_session import get_session +from simtest.remediation_test import ControlTest, RemediationTest + + def run_create_multi_region_cloudtrail(remediation, account, region): aws = get_session() remtest = ControlTest() remtest.load_json(remediation, wrap_it_in_findings=True) - remtest.test_json['detail']['findings'][0]['AwsAccountId'] = account - remtest.test_json['detail']['findings'][0]['Resources'][0]['Id'] = 'AWS::::Account:' + account + remtest.test_json["detail"]["findings"][0]["AwsAccountId"] = account + remtest.test_json["detail"]["findings"][0]["Resources"][0]["Id"] = ( + "AWS::::Account:" + account + ) remtest.create_finding() - remtest.print_heading(f'This test creates a multi-region CloudTrail named "multi-region-cloud-trail", an S3 bucket for CloudTrail logging, and an S3 bucket for logging access to the CloudTrail bucket. If the buckets already exist the remediation should still succeed. Bucket names are so0111-access-logs-{region}-{account} and so0111-aws-cloudtrail-{account}\n') + remtest.print_heading( + f'This test creates a multi-region CloudTrail named "multi-region-cloud-trail", an S3 bucket for CloudTrail logging, and an S3 bucket for logging access to the CloudTrail bucket. If the buckets already exist the remediation should still succeed. 
Bucket names are so0111-access-logs-{region}-{account} and so0111-aws-cloudtrail-{account}\n' + ) if account == aws.get_account(): instructions = [ 'Test setup will remove the CloudTrail named "multi-region-cloud-trail", if it exists already,', - f'then the two buckets, so0111-access-logs-{region}-{account} and so0111-aws-cloudtrail-{account}', - 'Note: Step 2 may take a while' - ] + f"then the two buckets, so0111-access-logs-{region}-{account} and so0111-aws-cloudtrail-{account}", + "Note: Step 2 may take a while", + ] remtest.print_prep_instructions(instructions) - input('Press enter to set up the test') + input("Press enter to set up the test") - delete_cloudtrail('multi-region-cloud-trail') - delete_bucket(f'so0111-aws-cloudtrail-{account}') - delete_bucket(f'so0111-access-logs-{region}-{account}') + delete_cloudtrail("multi-region-cloud-trail") + delete_bucket(f"so0111-aws-cloudtrail-{account}") + delete_bucket(f"so0111-access-logs-{region}-{account}") else: instructions = [ '1) Remove the CloudTrail named "multi-region-cloud-trail", if it exists already.', - f'2) Remove the two buckets, so0111-access-logs-{region}-{account} and so0111-aws-cloudtrail-{account}' + f"2) Remove the two buckets, so0111-access-logs-{region}-{account} and so0111-aws-cloudtrail-{account}", ] remtest.print_prep_instructions(instructions) remtest.run() - remtest.print_verification_instructions('Verify that a multi-region cloudtrail was created with S3 bucket') + remtest.print_verification_instructions( + "Verify that a multi-region cloudtrail was created with S3 bucket" + ) + def run_enable_cloudtrail_logfile_validation(remediation, account, region): - print('This test requires a CloudTrail in the region being tested. You can create one or use an existing trail. If you create one, deselect log file validation. This test will enable it.\n') + print( + "This test requires a CloudTrail in the region being tested. You can create one or use an existing trail. 
If you create one, deselect log file validation. This test will enable it.\n" + ) - print('SETUP\n=====\n') - print('1) Create a CloudTrail') - print('\tLog File Validation should be FALSE') - print('\tTrail must be in the same region as the test\n') + print("SETUP\n=====\n") + print("1) Create a CloudTrail") + print("\tLog File Validation should be FALSE") + print("\tTrail must be in the same region as the test\n") test = RemediationTest(remediation, account, wrap_it_in_findings=True) # Alter the test data - trail_name = input('Name of CloudTrail: ') + trail_name = input("Name of CloudTrail: ") - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = 'arn:aws:cloudtrail:' + \ - region + ':111111111111:trail/' + trail_name + test.test_json["detail"]["findings"][0]["Resources"][0]["Id"] = ( + "arn:aws:cloudtrail:" + region + ":111111111111:trail/" + trail_name + ) test.run() - print('\nVERIFICATION\n============\n') - print(f'1) {trail_name} has log file validation enabled') + print("\nVERIFICATION\n============\n") + print(f"1) {trail_name} has log file validation enabled") def run_make_cloudtrail_s3_bucket_private(remediation, account, region): - print('This test disables public access to a bucket. Rather than create a public bucket (which will result in an internal SEV2 ticket), use any private bucket or create a new bucket.\n') + print( + "This test disables public access to a bucket. Rather than create a public bucket (which will result in an internal SEV2 ticket), use any private bucket or create a new bucket.\n" + ) - print('SETUP\n=====\n') - print('1) Create an S3 bucket in the same region as the test. DO NOT MAKE IT PUBLIC!') - print('(the test will still set private access, even if already private)\n') + print("SETUP\n=====\n") + print( + "1) Create an S3 bucket in the same region as the test. DO NOT MAKE IT PUBLIC!" 
+ ) + print("(the test will still set private access, even if already private)\n") - test = RemediationTest('cis23', account) + test = RemediationTest("cis23", account) # Alter the test data - bucket_name = input('Name of an S3 bucket: ') - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = 'arn:aws:s3:::' + bucket_name + bucket_name = input("Name of an S3 bucket: ") + test.test_json["detail"]["findings"][0]["Resources"][0]["Id"] = ( + "arn:aws:s3:::" + bucket_name + ) test.run() -def run_log_cloudtrail_to_cloudwatch(remediation, account, region): - print('This test creates a CloudWatch logs group for CloudTrail.\n') +def run_log_cloudtrail_to_cloudwatch(remediation, account, region): + print("This test creates a CloudWatch logs group for CloudTrail.\n") - print('SETUP\n=====\n') - print('1) Use the CloudTrail created for cis22 (or create a new one)\n') + print("SETUP\n=====\n") + print("1) Use the CloudTrail created for cis22 (or create a new one)\n") test = RemediationTest(remediation, account, wrap_it_in_findings=True) # Alter the test data - trail_name = input('Name of CloudTrail: ') - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = 'arn:aws:cloudtrail:' + \ - region + ':111111111111:trail/' + trail_name + trail_name = input("Name of CloudTrail: ") + test.test_json["detail"]["findings"][0]["Resources"][0]["Id"] = ( + "arn:aws:cloudtrail:" + region + ":111111111111:trail/" + trail_name + ) test.run() - print('Verify that CloudWatch Logs Group CloudTrail/CIS2-4-' + trail_name + ' was created in the target account.\n') + print( + "Verify that CloudWatch Logs Group CloudTrail/CIS2-4-" + + trail_name + + " was created in the target account.\n" + ) + def run_create_ct_access_logging(remediation, account, region): - print('This test creates an access logging bucket the CloudTrail S3 bucket.\n') + print("This test creates an access logging bucket the CloudTrail S3 bucket.\n") - print('SETUP\n=====\n') - print('1) Use the S3 bucket created 
for cis23 (or create a new one)\n') + print("SETUP\n=====\n") + print("1) Use the S3 bucket created for cis23 (or create a new one)\n") test = RemediationTest(remediation, account) # Alter the test data - bucket_name = input('Name of an S3 bucket: ') - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = 'arn:aws:s3:::' + bucket_name + bucket_name = input("Name of an S3 bucket: ") + test.test_json["detail"]["findings"][0]["Resources"][0]["Id"] = ( + "arn:aws:s3:::" + bucket_name + ) test.run() - print('\nVerify that S3 bucket so0111-sharr-cloudtrailaccesslogs--' + region + ' was created in the target account. If the bucket already existed then simply check the lambda logs for errors.') + print( + "\nVerify that S3 bucket so0111-sharr-cloudtrailaccesslogs--" + + region + + " was created in the target account. If the bucket already existed then simply check the lambda logs for errors." + ) + def run_enable_ct_encryption(remediation, account, region): aws = get_session() - print(f'This test enables encryption on a CloudTrail\n') + print("This test enables encryption on a CloudTrail\n") - print('Automatic Setup\n') - print('===============\n') - print('1) Removes KmsKeyId from the test cloudtrail') + print("Automatic Setup\n") + print("===============\n") + print("1) Removes KmsKeyId from the test cloudtrail") - cloudtrail = input('CloudTrail to test with? ') + cloudtrail = input("CloudTrail to test with? 
") remove_cloudtrail_encryption(cloudtrail, account) test = RemediationTest(remediation, account, wrap_it_in_findings=True) - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = f'arn:{aws.get_partition()}:cloudtrail:{region}:{account}:trail/{cloudtrail}' - test.test_json['detail']['findings'][0]['Resources'][0]['Details']['AwsCloudTrailTrail']['HomeRegion'] = region + test.test_json["detail"]["findings"][0]["Resources"][0][ + "Id" + ] = f"arn:{aws.get_partition()}:cloudtrail:{region}:{account}:trail/{cloudtrail}" + test.test_json["detail"]["findings"][0]["Resources"][0]["Details"][ + "AwsCloudTrailTrail" + ]["HomeRegion"] = region test.run() - print('\nVERIFICATION\n============\n') - print(f'1) CloudTrail {cloudtrail} is encrypted') + print("\nVERIFICATION\n============\n") + print(f"1) CloudTrail {cloudtrail} is encrypted") + def run_create_cloudtrail_multi_region_trail(remediation, account, region): aws = get_session() - print('Simulate AFSBP CloudTrail.1 Findings\n') + print("Simulate AWS FSBP CloudTrail.1 Findings\n") - print(f'This test creates a multi-region CloudTrail named "multi-region-cloud-trail", an S3 bucket for CloudTrail logging, and an S3 bucket for logging access to the CloudTrail bucket. If the buckets already exist the remediation should still succeed. Bucket names are so0111-access-logs-{region}-{account} and so0111-aws-cloudtrail-{account}\n') + print( + f'This test creates a multi-region CloudTrail named "multi-region-cloud-trail", an S3 bucket for CloudTrail logging, and an S3 bucket for logging access to the CloudTrail bucket. If the buckets already exist the remediation should still succeed. 
Bucket names are so0111-access-logs-{region}-{account} and so0111-aws-cloudtrail-{account}\n' + ) if account == aws.get_account(): - print('Automatic Setup\n') - print('===============\n') - print('1) Remove the CloudTrail named "multi-region-cloud-trail", if it exists already.') - print(f'2) Remove the two buckets, so0111-access-logs-{region}-{account} and so0111-aws-cloudtrail-{account}') - print('Note: Step 2 may take a while') - input('HIT ENTER TO START') - - delete_cloudtrail('multi-region-cloud-trail') - delete_bucket(f'so0111-aws-cloudtrail-{account}') - delete_bucket(f'so0111-access-logs-{region}-{account}') + print("Automatic Setup\n") + print("===============\n") + print( + '1) Remove the CloudTrail named "multi-region-cloud-trail", if it exists already.' + ) + print( + f"2) Remove the two buckets, so0111-access-logs-{region}-{account} and so0111-aws-cloudtrail-{account}" + ) + print("Note: Step 2 may take a while") + input("HIT ENTER TO START") + + delete_cloudtrail("multi-region-cloud-trail") + delete_bucket(f"so0111-aws-cloudtrail-{account}") + delete_bucket(f"so0111-access-logs-{region}-{account}") else: - print('Manual Setup\n') - print('===============\n') - print('1) Remove the CloudTrail named "multi-region-cloud-trail", if it exists already.') - print(f'2) Remove the two buckets, so0111-access-logs-{region}-{account} and so0111-aws-cloudtrail-{account}') - input('Press enter when ready...') + print("Manual Setup\n") + print("===============\n") + print( + '1) Remove the CloudTrail named "multi-region-cloud-trail", if it exists already.' 
+ ) + print( + f"2) Remove the two buckets, so0111-access-logs-{region}-{account} and so0111-aws-cloudtrail-{account}" + ) + input("Press enter when ready...") test = RemediationTest(remediation, account, wrap_it_in_findings=True) - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = 'AWS::::Account:' + account + test.test_json["detail"]["findings"][0]["Resources"][0]["Id"] = ( + "AWS::::Account:" + account + ) test.run() - print('\nVERIFICATION\n============\n') - print('1) CloudTrail "multi-region-cloud-trail" was created, is encrypted, and is enabled for all regions') - print(f'2) Buckets so0111-access-logs-{region}-{account} and so0111-aws-cloudtrail-{account} were created') - print(f'3) CloudTrail log data is flowing to the so0111-aws-cloudtrail-{account} bucket') - print(f'4) Access logs for so0111-aws-cloudtrail-{account} bucket are delivered to so0111-access-logs-{region}-{account} bucket') + print("\nVERIFICATION\n============\n") + print( + '1) CloudTrail "multi-region-cloud-trail" was created, is encrypted, and is enabled for all regions' + ) + print( + f"2) Buckets so0111-access-logs-{region}-{account} and so0111-aws-cloudtrail-{account} were created" + ) + print( + f"3) CloudTrail log data is flowing to the so0111-aws-cloudtrail-{account} bucket" + ) + print( + f"4) Access logs for so0111-aws-cloudtrail-{account} bucket are delivered to so0111-access-logs-{region}-{account} bucket" + ) + def delete_cloudtrail(trailname): aws = get_session() - ct = aws.client('cloudtrail') + ct = aws.client("cloudtrail") try: - ct.delete_trail( - Name=trailname - ) - print(f'Deleted CloudTrail multi-region-cloud-trail') + ct.delete_trail(Name=trailname) + print("Deleted CloudTrail multi-region-cloud-trail") except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] # stream did exist but need new token, get it from exception data if exception_type in "TrailNotFoundException": - print('Trail does 
not exist...continuing') + print("Trail does not exist...continuing") else: - print(f'Unhandled client error {exception_type}') + print(f"Unhandled client error {exception_type}") raise except Exception as e: print(e) - print('Something went wrong') + print("Something went wrong") raise + def remove_cloudtrail_encryption(trailname, account): aws = get_session() if account == aws.get_account(): - ct = aws.client('cloudtrail') + ct = aws.client("cloudtrail") try: - ct.update_trail( - Name=trailname, - KmsKeyId='' - ) - print(f'Removed CloudTrail encryption from {trailname}') + ct.update_trail(Name=trailname, KmsKeyId="") + print(f"Removed CloudTrail encryption from {trailname}") except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] # stream did exist but need new token, get it from exception data if exception_type in "TrailNotFoundException": - print('Trail does not exist') + print("Trail does not exist") exit() else: - print(f'Unhandled client error {exception_type}') + print(f"Unhandled client error {exception_type}") raise except Exception as e: print(e) - print('Something went wrong') + print("Something went wrong") raise else: - print(f'Manually disable encryption on {trailname} in {account}') - input('ENTER to continue...') + print(f"Manually disable encryption on {trailname} in {account}") + input("ENTER to continue...") + def delete_bucket(bucketname): aws = get_session() - s3_resource = aws.resource('s3') - s3 = aws.client('s3') + s3_resource = aws.resource("s3") + s3 = aws.client("s3") try: bucket = s3_resource.Bucket(bucketname) bucket.objects.all().delete() - s3.delete_bucket( - Bucket=bucketname - ) - print(f'Deleted CloudTrail multi-region-cloud-trail') + s3.delete_bucket(Bucket=bucketname) + print("Deleted CloudTrail multi-region-cloud-trail") except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] # stream did exist but 
need new token, get it from exception data if exception_type in "NoSuchBucket": - print(f'Bucket {bucketname} does not exist...continuing') + print(f"Bucket {bucketname} does not exist...continuing") else: - print(f'Unhandled client error {exception_type} deleting bucket {bucketname}') + print( + f"Unhandled client error {exception_type} deleting bucket {bucketname}" + ) raise except Exception as e: print(e) - print('Something went wrong') + print("Something went wrong") raise diff --git a/simtest/simtest/remediation/cloudwatch.py b/simtest/simtest/remediation/cloudwatch.py index 1d8f5503..01e8a613 100644 --- a/simtest/simtest/remediation/cloudwatch.py +++ b/simtest/simtest/remediation/cloudwatch.py @@ -2,11 +2,12 @@ # SPDX-License-Identifier: Apache-2.0 from simtest.remediation_test import RemediationTest + def run_log_and_filter(remediation, account, region): - print('This test creates a log metric filter and alarm') + print("This test creates a log metric filter and alarm") - print('SETUP\n=====\n') - print('None required.\n') + print("SETUP\n=====\n") + print("None required.\n") test = RemediationTest(remediation, account, wrap_it_in_findings=True) @@ -17,6 +18,8 @@ def run_log_and_filter(remediation, account, region): test.run() - print('\nOpen the Log Group (from SSM Parameter Solutions/SO0111/Metrics_LogGroupName.') + print( + "\nOpen the Log Group (from SSM Parameter Solutions/SO0111/Metrics_LogGroupName." + ) print('Click "Metric filters"') - print('Verify that the metric for SHARR exists and has an alarm defined.') + print("Verify that the metric for SHARR exists and has an alarm defined.") diff --git a/simtest/simtest/remediation/config.py b/simtest/simtest/remediation/config.py index 797674b3..2a677a1e 100644 --- a/simtest/simtest/remediation/config.py +++ b/simtest/simtest/remediation/config.py @@ -1,108 +1,125 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 +from botocore.exceptions import ClientError + +from simtest.boto_session import get_session from simtest.remediation.cloudtrail import delete_bucket from simtest.remediation_test import RemediationTest -from simtest.boto_session import get_session -from botocore.exceptions import ClientError + def run_setup_config(remediation, account, region): aws = get_session() - print(f'This test enables AWS Config\n') + print("This test enables AWS Config\n") if account == aws.get_account(): - print('Automatic Setup\n') - print('===============\n') - print('1) Disable AWS Config') - print('2) Remove SNS Topic SO0111-SHARR-AFSBP-Config-1-AWSConfigNotification') - print(f'3) Remove only the config bucket, so0111-aws-config-{region}-{account}. This tests that the remediation can use the existing access logging bucket, if you ran CloudTrail.1 before Config.1') - print('Note: Step 3 may take a while') - input('HIT ENTER TO START') + print("Automatic Setup\n") + print("===============\n") + print("1) Disable AWS Config") + print("2) Remove SNS Topic SO0111-SHARR-AFSBP-Config-1-AWSConfigNotification") + print( + f"3) Remove only the config bucket, so0111-aws-config-{region}-{account}. 
This tests that the remediation can use the existing access logging bucket, if you ran CloudTrail.1 before Config.1" + ) + print("Note: Step 3 may take a while") + input("HIT ENTER TO START") delete_default_config_recorder() delete_default_delivery_channel() - delete_sns_topic('SO0111-SHARR-AFSBP-Config-1-AWSConfigNotification', account, region) - delete_bucket(f'so0111-aws-config-{region}-{account}') + delete_sns_topic( + "SO0111-SHARR-AFSBP-Config-1-AWSConfigNotification", account, region + ) + delete_bucket(f"so0111-aws-config-{region}-{account}") else: - print('Manual Setup\n') - print('===============\n') - print('1) Disable AWS Config by disabling recording') - print('2) Remove SNS Topic SO0111-SHARR-AFSBP-Config-1-AWSConfigNotification') - print(f'3) Remove the config bucket, so0111-aws-config-{region}-{account}. This tests that the remediation can use the existing access logging bucket, if you ran CloudTrail.1 before Config.1') - print('Note: Step 3 may take a while') - input('\nHit enter when you have completed these steps') + print("Manual Setup\n") + print("===============\n") + print("1) Disable AWS Config by disabling recording") + print("2) Remove SNS Topic SO0111-SHARR-AFSBP-Config-1-AWSConfigNotification") + print( + f"3) Remove the config bucket, so0111-aws-config-{region}-{account}. 
This tests that the remediation can use the existing access logging bucket, if you ran CloudTrail.1 before Config.1" + ) + print("Note: Step 3 may take a while") + input("\nHit enter when you have completed these steps") test = RemediationTest(remediation, account, wrap_it_in_findings=True) - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = 'AWS::::Account:' + account + test.test_json["detail"]["findings"][0]["Resources"][0]["Id"] = ( + "AWS::::Account:" + account + ) test.run() - print('\nVERIFICATION\n============\n') - print(f'1) Config is enabled for all resources, globally, with logging to so0111-aws-config-{region}-{account}') - print(f'2) Bucket so0111-aws-config-{region}-{account} is encrypted and has access logging enabled') - print(f'3) Config data is flowing to the config bucket, so0111-aws-config-{region}-{account}') + print("\nVERIFICATION\n============\n") + print( + f"1) Config is enabled for all resources, globally, with logging to so0111-aws-config-{region}-{account}" + ) + print( + f"2) Bucket so0111-aws-config-{region}-{account} is encrypted and has access logging enabled" + ) + print( + f"3) Config data is flowing to the config bucket, so0111-aws-config-{region}-{account}" + ) + def delete_default_config_recorder(): aws = get_session() - cfgsvc = aws.client('config') + cfgsvc = aws.client("config") try: - cfgsvc.delete_configuration_recorder( - ConfigurationRecorderName='default' - ) + cfgsvc.delete_configuration_recorder(ConfigurationRecorderName="default") print('Deleted AWS Config recorder "default"') except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] # stream did exist but need new token, get it from exception data if exception_type == "NoSuchConfigurationRecorderException": - print(f'Default delivery recorder does not exist...continuing') + print("Default delivery recorder does not exist...continuing") else: - print(f'Unhandled client error 
{exception_type} deleting default recorder') + print(f"Unhandled client error {exception_type} deleting default recorder") raise except Exception as e: print(e) - print('Something went wrong') + print("Something went wrong") raise + def delete_default_delivery_channel(): aws = get_session() - cfgsvc = aws.client('config') + cfgsvc = aws.client("config") try: - cfgsvc.delete_delivery_channel( - DeliveryChannelName='default' - ) + cfgsvc.delete_delivery_channel(DeliveryChannelName="default") print('Deleted AWS Config delivery channel "default"') except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] # stream did exist but need new token, get it from exception data if exception_type == "NoSuchDeliveryChannelException": - print(f'Default delivery channel does not exist...continuing') + print("Default delivery channel does not exist...continuing") else: - print(f'Unhandled client error {exception_type} deleting default delivery channel') + print( + f"Unhandled client error {exception_type} deleting default delivery channel" + ) raise except Exception as e: print(e) - print('Something went wrong') + print("Something went wrong") raise + def delete_sns_topic(topicname, account, region): aws = get_session() - sns = aws.client('sns') - topic_arn = f'arn:{aws.get_partition()}:sns:{region}:{account}:{topicname}' + sns = aws.client("sns") + topic_arn = f"arn:{aws.get_partition()}:sns:{region}:{account}:{topicname}" try: - sns.delete_topic( - TopicArn=topic_arn - ) - print(f'Deleted Amazon SNS topic {topic_arn}') + sns.delete_topic(TopicArn=topic_arn) + print(f"Deleted Amazon SNS topic {topic_arn}") except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] # stream did exist but need new token, get it from exception data if exception_type == "NotFoundException": - print(f'{topicname} does not exist...continuing') + print(f"{topicname} does not 
exist...continuing") else: - print(f'Unhandled client error {exception_type} deleting sns topic {topicname}') + print( + f"Unhandled client error {exception_type} deleting sns topic {topicname}" + ) raise except Exception as e: print(e) - print('Something went wrong') + print("Something went wrong") raise diff --git a/simtest/simtest/remediation/ec2.py b/simtest/simtest/remediation/ec2.py index f233d8ae..92273fd3 100644 --- a/simtest/simtest/remediation/ec2.py +++ b/simtest/simtest/remediation/ec2.py @@ -1,146 +1,179 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -from simtest.remediation_test import RemediationTest from simtest.boto_session import get_session +from simtest.remediation_test import RemediationTest + def run_remove_public_ec2_snaps(remediation, account, region): aws = get_session() - print('This test removes public permissions from an EC2 Snapshot\n') + print("This test removes public permissions from an EC2 Snapshot\n") - print('WARNING: This test may result in a Sev 2!\n') - input('Press ENTER to confirm that you read the warning.') + print("WARNING: This test may result in a Sev 2!\n") + input("Press ENTER to confirm that you read the warning.") snapshot_id = None if account == aws.get_account(): - print('Automatic Setup\n') - print('===============\n') - print('1) Select a snapshot from your test account. Snapshot must not have sensitive, customer, or production data!') - input('HIT ENTER TO START') + print("Automatic Setup\n") + print("===============\n") + print( + "1) Select a snapshot from your test account. Snapshot must not have sensitive, customer, or production data!" 
+ ) + input("HIT ENTER TO START") - snapshot_id = input('Snapshot Id: ') - ec2 = aws.client('ec2') + snapshot_id = input("Snapshot Id: ") + ec2 = aws.client("ec2") try: ec2.modify_snapshot_attribute( - Attribute='CreateVolumePermission', - CreateVolumePermission={ - 'Add': [{'Group': 'all'}] - }, - SnapshotId=snapshot_id + Attribute="CreateVolumePermission", + CreateVolumePermission={"Add": [{"Group": "all"}]}, + SnapshotId=snapshot_id, ) - print(f'Snapshot {snapshot_id} permissions set to PUBLIC') + print(f"Snapshot {snapshot_id} permissions set to PUBLIC") except Exception as e: print(e) - print('Something went wrong') + print("Something went wrong") else: - print('Manual Setup\n') - print('===============\n') - print('1) Select a snapshot from your test account. Snapshot must not have sensitive, customer, or production data!') - print('2) Change the snapshot permissions to make it public.') - input('Press enter when ready...') + print("Manual Setup\n") + print("===============\n") + print( + "1) Select a snapshot from your test account. Snapshot must not have sensitive, customer, or production data!" 
+ ) + print("2) Change the snapshot permissions to make it public.") + input("Press enter when ready...") test = RemediationTest(remediation, account, wrap_it_in_findings=True) - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = 'AWS::::Account:' + account - test.test_json['detail']['findings'][0]['Resources'][0]['Region'] = region + test.test_json["detail"]["findings"][0]["Resources"][0]["Id"] = ( + "AWS::::Account:" + account + ) + test.test_json["detail"]["findings"][0]["Resources"][0]["Region"] = region # test.test_json['detail']['findings'][0]['testmode'] = True test.run() - print('\nVERIFICATION\n============\n') - print(f'1) {snapshot_id} is no longer public') - -def run_close_default_sg(remediation, account, region): + print("\nVERIFICATION\n============\n") + print(f"1) {snapshot_id} is no longer public") - print('This test removes open ports in- and outbound from the security group in the finding. You will need to create a security group in the AWS console.\n') - print('Manual Setup\n') - print('============\n') - print(f'Please do the following') - print('1) Create (or select) a default Security Group in your test account. It must be named "default". Allow all inbound and outbound traffic from/to anywhere. Enter the Security Group Id below.') - sg_id = input('Security Group Id?: ') +def run_close_default_sg(remediation, account, region): + print( + "This test removes open ports in- and outbound from the security group in the finding. You will need to create a security group in the AWS console.\n" + ) + + print("Manual Setup\n") + print("============\n") + print("Please do the following") + print( + '1) Create (or select) a default Security Group in your test account. It must be named "default". Allow all inbound and outbound traffic from/to anywhere. Enter the Security Group Id below.' 
+ ) + sg_id = input("Security Group Id?: ") test = RemediationTest(remediation, account, wrap_it_in_findings=True) - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = f'arn:aws:ec2:{region}:{account}:security-group/{sg_id}' - test.test_json['detail']['findings'][0]['Resources'][0]['Details']['AwsEc2SecurityGroup']['GroupId'] = sg_id + test.test_json["detail"]["findings"][0]["Resources"][0][ + "Id" + ] = f"arn:aws:ec2:{region}:{account}:security-group/{sg_id}" + test.test_json["detail"]["findings"][0]["Resources"][0]["Details"][ + "AwsEc2SecurityGroup" + ]["GroupId"] = sg_id test.run() - print('\nVERIFICATION\n============\n') - print(f'1) {sg_id} both inbound and outbound rules are removed') + print("\nVERIFICATION\n============\n") + print(f"1) {sg_id} both inbound and outbound rules are removed") + def run_disable_public_access_for_security_group(remediation, account, region): - print('Simulate cis4142 Findings\n') - print('This test closes inbound ports on a Security Group.\n') + print("Simulate cis4142 Findings\n") + print("This test closes inbound ports on a Security Group.\n") - print('SETUP\n=====\n') - print('1) Create a Security Group') - print('2) Configure it to allow open inbound access to ports 22 and 3389\n') + print("SETUP\n=====\n") + print("1) Create a Security Group") + print("2) Configure it to allow open inbound access to ports 22 and 3389\n") test = RemediationTest(remediation, account) # Alter the test data - sg_id = input('Security Group: ') - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = "arn:aws:ec2:us-east-2:111111111111:security-group/" + sg_id - test.test_json['detail']['findings'][0]['Resources'][0]['Details']['AwsEc2SecurityGroup']['GroupId'] = sg_id + sg_id = input("Security Group: ") + test.test_json["detail"]["findings"][0]["Resources"][0]["Id"] = ( + "arn:aws:ec2:us-east-2:111111111111:security-group/" + sg_id + ) + test.test_json["detail"]["findings"][0]["Resources"][0]["Details"][ + 
"AwsEc2SecurityGroup" + ]["GroupId"] = sg_id test.run() - print('\nThis remediation uses SSM Automation Documents on the target account. Verify that there are no lambda errors.') + print( + "\nThis remediation uses SSM Automation Documents on the target account. Verify that there are no lambda errors." + ) + def run_remove_vpc_default_security_group_rules(remediation, account, region): - print('Simulate cis43 Findings\n') + print("Simulate cis43 Findings\n") - print('This test closes access inbound and outbound for a Securty Group.\n') + print("This test closes access inbound and outbound for a Securty Group.\n") - print('SETUP\n=====\n') - print('1) Create a Security Group') - print('2) Configure it to allow open inbound and outbound access: all protocols from/to anywhere.\n') + print("SETUP\n=====\n") + print("1) Create a Security Group") + print( + "2) Configure it to allow open inbound and outbound access: all protocols from/to anywhere.\n" + ) test = RemediationTest(remediation, account) # Alter the test data - sg_id = input('Security Group: ') - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = "arn:aws:ec2:us-east-2:111111111111:security-group/" + sg_id - test.test_json['detail']['findings'][0]['Resources'][0]['Details']['AwsEc2SecurityGroup']['GroupId'] = sg_id + sg_id = input("Security Group: ") + test.test_json["detail"]["findings"][0]["Resources"][0]["Id"] = ( + "arn:aws:ec2:us-east-2:111111111111:security-group/" + sg_id + ) + test.test_json["detail"]["findings"][0]["Resources"][0]["Details"][ + "AwsEc2SecurityGroup" + ]["GroupId"] = sg_id test.run() + def run_enable_ebs_encryption_by_default(remediation, account, region): aws = get_session() - print('Simulate AFSBP EC2.7 Findings\n') + print("Simulate AWS FSBP EC2.7 Findings\n") - print('This test enables EBS encryption by default.\n') + print("This test enables EBS encryption by default.\n") if account == aws.get_account(): - print('Automatic Setup\n') - print('============\n') - 
print('1) Disables EBS encryption by default.') + print("Automatic Setup\n") + print("============\n") + print("1) Disables EBS encryption by default.") - input('HIT ENTER TO START') + input("HIT ENTER TO START") disable_ebs_encryption_by_default() else: - print('Manual Setup\n') - print('============\n') - print('1) Disable EBS encryption by default in the target account:') - print(' EC2 Dashboard->Account Attributes->EBS Encryption') - input('Press enter when ready...') + print("Manual Setup\n") + print("============\n") + print("1) Disable EBS encryption by default in the target account:") + print(" EC2 Dashboard->Account Attributes->EBS Encryption") + input("Press enter when ready...") test = RemediationTest(remediation, account, wrap_it_in_findings=True) - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = 'AWS::::Account:' + account + test.test_json["detail"]["findings"][0]["Resources"][0]["Id"] = ( + "AWS::::Account:" + account + ) test.run() - print('\nVERIFICATION\n============\n') - print(f'1) In EC2 Dashboard, click EBS Encryption under Account Attributes and confirm that it is enabled.') + print("\nVERIFICATION\n============\n") + print( + "1) In EC2 Dashboard, click EBS Encryption under Account Attributes and confirm that it is enabled." + ) + def disable_ebs_encryption_by_default(): aws = get_session() - ec2 = aws.client('ec2') + ec2 = aws.client("ec2") try: ec2.disable_ebs_encryption_by_default() except Exception as e: print(e) - print('Something went wrong') + print("Something went wrong") raise diff --git a/simtest/simtest/remediation/guardduty.py b/simtest/simtest/remediation/guardduty.py index 1d015b6d..af228e8f 100644 --- a/simtest/simtest/remediation/guardduty.py +++ b/simtest/simtest/remediation/guardduty.py @@ -1,42 +1,46 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -from simtest.remediation_test import RemediationTest from simtest.boto_session import get_session +from simtest.remediation_test import RemediationTest + def run_guardduty_1(remediation, account, region): - print('Simulate AFSBP GuardDuty.1 Findings\n') + print("Simulate AWS FSBP GuardDuty.1 Findings\n") - print('This test enables GuardDuty in the finding region.\n') + print("This test enables GuardDuty in the finding region.\n") - print('Automatic Setup') - print('============\n') - print('1) Delete all GuardDuty detectors.\n') - input('HIT ENTER TO START') + print("Automatic Setup") + print("============\n") + print("1) Delete all GuardDuty detectors.\n") + input("HIT ENTER TO START") delete_all_guardduty_detectors() test = RemediationTest(remediation, account, wrap_it_in_findings=True) - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = 'AWS::::Account:' + account + test.test_json["detail"]["findings"][0]["Resources"][0]["Id"] = ( + "AWS::::Account:" + account + ) test.run() - print('\nVERIFICATION\n============\n') - print(f'1) In GuardDuty Settings, verify that a Detector is active on a 6 hour interval.') + print("\nVERIFICATION\n============\n") + print( + "1) In GuardDuty Settings, verify that a Detector is active on a 6 hour interval." 
+ ) + def delete_all_guardduty_detectors(): """ Run this only in a test account """ aws = get_session() - gd = aws.client('guardduty') + gd = aws.client("guardduty") try: - detectors = gd.list_detectors().get('DetectorIds') + detectors = gd.list_detectors().get("DetectorIds") for detector in detectors: - gd.delete_detector( - DetectorId=detector - ) + gd.delete_detector(DetectorId=detector) except Exception as e: print(e) - print('Something went wrong') + print("Something went wrong") raise diff --git a/simtest/simtest/remediation/iam.py b/simtest/simtest/remediation/iam.py index 470144d7..8e36d709 100644 --- a/simtest/simtest/remediation/iam.py +++ b/simtest/simtest/remediation/iam.py @@ -1,52 +1,66 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -from simtest.remediation_test import RemediationTest from simtest.boto_session import get_session +from simtest.remediation_test import RemediationTest + def run_remove_old_credentials(remediation, account, region): - print('A full test requires an IAM user ID with keys/credentials older than 90 days. Unless you have a time machine, you may wish to simply test that the permissions allow the API calls without remediating any actual keys.\n') - print('SETUP\n=====\n') - print('1) create an IAM user in the target account') - print('2) Create an access key\n') - print('Note: the test will fail if the IAM user has no active credentials.\n') + print( + "A full test requires an IAM user ID with keys/credentials older than 90 days. 
Unless you have a time machine, you may wish to simply test that the permissions allow the API calls without remediating any actual keys.\n" + ) + print("SETUP\n=====\n") + print("1) create an IAM user in the target account") + print("2) Create an access key\n") + print("Note: the test will fail if the IAM user has no active credentials.\n") test = RemediationTest(remediation, account, True) # key user - key_user = input('Name of an IAM user: ') - key_user_id = get_userid_from_name(key_user, region) - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = 'arn:aws-cn:iam::111111111111:user/' + key_user - test.test_json['detail']['findings'][0]['Resources'][0]['Details']['AwsIamUser']['UserId'] = key_user_id + key_user = input("Name of an IAM user: ") + key_user_id = get_userid_from_name(key_user) + test.test_json["detail"]["findings"][0]["Resources"][0]["Id"] = ( + "arn:aws-cn:iam::111111111111:user/" + key_user + ) + test.test_json["detail"]["findings"][0]["Resources"][0]["Details"]["AwsIamUser"][ + "UserId" + ] = key_user_id test.run() + def run_revoke_unrotated_keys(remediation, account, region): test = RemediationTest(remediation, account, True) - key_user = input('Name of an IAM user: ') - key_user_id = get_userid_from_name(key_user, region) - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = 'arn:aws-cn:iam::111111111111:user/' + key_user - test.test_json['detail']['findings'][0]['Resources'][0]['Details']['AwsIamUser']['UserId'] = key_user_id + key_user = input("Name of an IAM user: ") + key_user_id = get_userid_from_name(key_user) + test.test_json["detail"]["findings"][0]["Resources"][0]["Id"] = ( + "arn:aws-cn:iam::111111111111:user/" + key_user + ) + test.test_json["detail"]["findings"][0]["Resources"][0]["Details"]["AwsIamUser"][ + "UserId" + ] = key_user_id test.run() + def run_set_password_policy(remediation, account, region): - print('Remediates the finding by putting in place an account password policy.\n') - 
print('SETUP\n=====\n') - print('1) Go to IAM in the console') - print('2) Remove the target account\'s password policy\n') - input('Hit enter when ready') + print("Remediates the finding by putting in place an account password policy.\n") + print("SETUP\n=====\n") + print("1) Go to IAM in the console") + print("2) Remove the target account's password policy\n") + input("Hit enter when ready") test = RemediationTest(remediation, account, True) test.run() - print('\nVERIFICATION\n============\n') - print('1) Verify that the Account password policy is established') + print("\nVERIFICATION\n============\n") + print("1) Verify that the Account password policy is established") + def get_userid_from_name(username): aws = get_session() - iam = aws.client('iam') - for user in iam.list_users().get('Users', []): - if username == user.get('UserName',''): - return user.get('UserId', None) + iam = aws.client("iam") + for user in iam.list_users().get("Users", []): + if username == user.get("UserName", ""): + return user.get("UserId", None) diff --git a/simtest/simtest/remediation/kms.py b/simtest/simtest/remediation/kms.py index 2882c4c1..35f42640 100644 --- a/simtest/simtest/remediation/kms.py +++ b/simtest/simtest/remediation/kms.py @@ -2,18 +2,25 @@ # SPDX-License-Identifier: Apache-2.0 from simtest.remediation_test import RemediationTest + def run_setup_key_rotation(remediation, account, region): - print('This test enables rotation on a KMS key.\n') + print("This test enables rotation on a KMS key.\n") - print('SETUP\n=====\n') - print('1) Create a symmetric KMS Customer-Managed Key, leaving CMK rotation disabled.\n') + print("SETUP\n=====\n") + print( + "1) Create a symmetric KMS Customer-Managed Key, leaving CMK rotation disabled.\n" + ) test = RemediationTest(remediation, account) # Alter the test data - key_id = input('CMK Id: ') - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = 'AWS::KMS::Key:' + key_id + key_id = input("CMK Id: ") + 
test.test_json["detail"]["findings"][0]["Resources"][0]["Id"] = ( + "AWS::KMS::Key:" + key_id + ) test.run() - print('\nVerify that Key rotation is enabled for the key. If it was already enabled, check the lambda logs for errors. Optionally, disable key rotation and rerun this test.') + print( + "\nVerify that Key rotation is enabled for the key. If it was already enabled, check the lambda logs for errors. Optionally, disable key rotation and rerun this test." + ) diff --git a/simtest/simtest/remediation/rds.py b/simtest/simtest/remediation/rds.py index 6508ad6f..cafb82b1 100644 --- a/simtest/simtest/remediation/rds.py +++ b/simtest/simtest/remediation/rds.py @@ -2,69 +2,87 @@ # SPDX-License-Identifier: Apache-2.0 from simtest.remediation_test import RemediationTest -def run_make_rds_snapshot_private(remediation, account, region): - print('This test makes an RDS snapshot private.\n') +def run_make_rds_snapshot_private(remediation, account, region): + print("This test makes an RDS snapshot private.\n") - print('WARNING: This test may result in a Sev 2!\n') - input('Press ENTER to confirm that you read the warning.') + print("WARNING: This test may result in a Sev 2!\n") + input("Press ENTER to confirm that you read the warning.") - print('Manual Setup') - print('============\n') - print('This test requires an unencrypted RDS database') - print('1) Create an unencrypted RDS snapshot from a cluster with NO CUSTOMER OR SENSITIVE DATA.') - print('2) Make the snapshot public (Actions->Share Snapshot)') - public_snapshot = input('\nName of public snapshot?: ') + print("Manual Setup") + print("============\n") + print("This test requires an unencrypted RDS database") + print( + "1) Create an unencrypted RDS snapshot from a cluster with NO CUSTOMER OR SENSITIVE DATA." 
+ ) + print("2) Make the snapshot public (Actions->Share Snapshot)") + public_snapshot = input("\nName of public snapshot?: ") - test = RemediationTest(remediation, wrap_it_in_findings=True) + test = RemediationTest(remediation, account, wrap_it_in_findings=True) - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = f'arn:aws:rds:{region}:{account}:cluster-snapshot:{public_snapshot}' + test.test_json["detail"]["findings"][0]["Resources"][0][ + "Id" + ] = f"arn:aws:rds:{region}:{account}:cluster-snapshot:{public_snapshot}" test.run() - print('\nVERIFICATION\n============\n') - print(f'1) In RDS, verify that the snapshot is not public.') - -def run_enable_enhanced_monitoring_on_rds_instance(remediation, account, region): + print("\nVERIFICATION\n============\n") + print("1) In RDS, verify that the snapshot is not public.") - print('Simulate AFSBP RDS.6 Findings\n') - print('This test enables enhanced monitoring on an RDS cluster.\n') - - print('Manual Setup') - print('============\n') - print('1) Select an RDS database cluster to test with. Be sure to deselect Enable Encryption under Additional Configuration when you create the cluster to test with.') - print('2) Disable enhanced monitoring for a database instance (database->Modify->Advanced configuration->Monitoring') - print('3) Get the Resource ID from the database (not cluster) Resource ID on the Configuration Tab') - dbi_resourceid = input('\nResource ID? (db-xxxxxxxxxxx): ') +def run_enable_enhanced_monitoring_on_rds_instance(remediation, account, region): + print("Simulate AWS FSBP RDS.6 Findings\n") + + print("This test enables enhanced monitoring on an RDS cluster.\n") + + print("Manual Setup") + print("============\n") + print( + "1) Select an RDS database cluster to test with. Be sure to deselect Enable Encryption under Additional Configuration when you create the cluster to test with." 
+ ) + print( + "2) Disable enhanced monitoring for a database instance (database->Modify->Advanced configuration->Monitoring" + ) + print( + "3) Get the Resource ID from the database (not cluster) Resource ID on the Configuration Tab" + ) + dbi_resourceid = input("\nResource ID? (db-xxxxxxxxxxx): ") test = RemediationTest(remediation, account, wrap_it_in_findings=True) - test.test_json['detail']['findings'][0]['Resources'][0]['Details']['AwsRdsDbInstance']['DbiResourceId'] = dbi_resourceid + test.test_json["detail"]["findings"][0]["Resources"][0]["Details"][ + "AwsRdsDbInstance" + ]["DbiResourceId"] = dbi_resourceid test.run() - print('\nVERIFICATION\n============\n') - print(f'1) In RDS, verify that enhanced monitoring is enabled.') + print("\nVERIFICATION\n============\n") + print("1) In RDS, verify that enhanced monitoring is enabled.") -def run_enable_rds_cluster_deletion_protection(remediation, account, region): - print('Simulate AFSBP RDS.7 Findings\n') +def run_enable_rds_cluster_deletion_protection(remediation, account, region): + print("Simulate AWS FSBP RDS.7 Findings\n") - print('This test enables termination protection.\n') + print("This test enables termination protection.\n") - print('Manual Setup') - print('============\n') - print('1) Select an RDS cluster to test with.') - print('2) Disable Termination Protection for the RDS cluster (Modify button, Deletion protection)') - print('3) Get the Cluster Resource ID from the cluster (not database) Configuration Tab') - cluster_resourceid = input('\nCluster Resource Id? (cluster-xxxxxxxxxxxx): ') + print("Manual Setup") + print("============\n") + print("1) Select an RDS cluster to test with.") + print( + "2) Disable Termination Protection for the RDS cluster (Modify button, Deletion protection)" + ) + print( + "3) Get the Cluster Resource ID from the cluster (not database) Configuration Tab" + ) + cluster_resourceid = input("\nCluster Resource Id? 
(cluster-xxxxxxxxxxxx): ") test = RemediationTest(remediation, account, wrap_it_in_findings=True) - test.test_json['detail']['findings'][0]['Resources'][0]['Details']['AwsRdsDbCluster']['DbClusterResourceId'] = cluster_resourceid + test.test_json["detail"]["findings"][0]["Resources"][0]["Details"][ + "AwsRdsDbCluster" + ]["DbClusterResourceId"] = cluster_resourceid test.run() - print('\nVERIFICATION\n============\n') - print(f'1) In RDS, verify that termination protection is enabled.') + print("\nVERIFICATION\n============\n") + print("1) In RDS, verify that termination protection is enabled.") diff --git a/simtest/simtest/remediation/s3.py b/simtest/simtest/remediation/s3.py index 07739260..c3b8816e 100644 --- a/simtest/simtest/remediation/s3.py +++ b/simtest/simtest/remediation/s3.py @@ -1,44 +1,49 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 from simtest.remediation_test import RemediationTest -from botocore.exceptions import ClientError -def run_s3_block_public_access(remediation, account, region): - print('Test setting S3 public access block at the account level.\n') +def run_s3_block_public_access(remediation, account, region): + print("Test setting S3 public access block at the account level.\n") - print('Manual Setup') - print('============\n') - print('1) Go to S3 in the console') - print('2) Go to Block Public Access settings for this account') - print('3) Edit settings and uncheck all boxes') - print('4) Save settings') + print("Manual Setup") + print("============\n") + print("1) Go to S3 in the console") + print("2) Go to Block Public Access settings for this account") + print("3) Edit settings and uncheck all boxes") + print("4) Save settings") - test = RemediationTest(remediation, wrap_it_in_findings=True) - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = 'AWS::::Account:' + account + test = RemediationTest(remediation, account, wrap_it_in_findings=True) + 
test.test_json["detail"]["findings"][0]["Resources"][0]["Id"] = ( + "AWS::::Account:" + account + ) test.run() - print('\nVERIFICATION\n============\n') - print(f'1) In S3, verify account-level public access blocks are enabled.') + print("\nVERIFICATION\n============\n") + print("1) In S3, verify account-level public access blocks are enabled.") -def run_s3_block_public_bucket_access(remediation, account, region): - print('Test setting S3 public access block at the account level.\n') +def run_s3_block_public_bucket_access(remediation, account, region): + print("Test setting S3 public access block at the account level.\n") - print('Manual Setup') - print('============\n') - print('1) Go to S3 in the console') - print('2) Choose a bucket to test with and open "Block public access" settings in "Permissions"') - print('3) Edit settings and uncheck all boxes') - print('4) Save settings') + print("Manual Setup") + print("============\n") + print("1) Go to S3 in the console") + print( + '2) Choose a bucket to test with and open "Block public access" settings in "Permissions"' + ) + print("3) Edit settings and uncheck all boxes") + print("4) Save settings") - test_bucket = input('\nBucket name?: ') + test_bucket = input("\nBucket name?: ") test = RemediationTest(remediation, account, wrap_it_in_findings=True) - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = f'arn:aws:s3:::{test_bucket}' + test.test_json["detail"]["findings"][0]["Resources"][0][ + "Id" + ] = f"arn:aws:s3:::{test_bucket}" test.run() - print('\nVERIFICATION\n============\n') - print(f'1) In S3, verify bucket {test_bucket} public access blocks are enabled.') + print("\nVERIFICATION\n============\n") + print(f"1) In S3, verify bucket {test_bucket} public access blocks are enabled.") diff --git a/simtest/simtest/remediation/vpc.py b/simtest/simtest/remediation/vpc.py index da03d455..3afcc23b 100644 --- a/simtest/simtest/remediation/vpc.py +++ b/simtest/simtest/remediation/vpc.py @@ -2,20 +2,25 
@@ # SPDX-License-Identifier: Apache-2.0 from simtest.remediation_test import RemediationTest -def run_enable_vpc_flow_logs(remediation, account, region): - print('This test enables VPC Flow Logging for a VPC.\n') +def run_enable_vpc_flow_logs(remediation, account, region): + print("This test enables VPC Flow Logging for a VPC.\n") - print('SETUP\n=====\n') - print('1) Use the default VPC (or create a new VPC)\n') + print("SETUP\n=====\n") + print("1) Use the default VPC (or create a new VPC)\n") test = RemediationTest(remediation, account, wrap_it_in_findings=True) # Alter the test data - vpc_id = input('From the console, get the VPC ID of a VPC. This test will enable VPC Flow Logs: ') - test.test_json['detail']['findings'][0]['Resources'][0]['Id'] = "arn:aws:ec2:" + \ - region + ":" + account + ":vpc/" + vpc_id + vpc_id = input( + "From the console, get the VPC ID of a VPC. This test will enable VPC Flow Logs: " + ) + test.test_json["detail"]["findings"][0]["Resources"][0]["Id"] = ( + "arn:aws:ec2:" + region + ":" + account + ":vpc/" + vpc_id + ) test.run() - print('\nVerify that flow logs are enabled for the VPC. If it was already enabled, check the lambda logs for errors. Optionally, disable flow logs and rerun this test.') + print( + "\nVerify that flow logs are enabled for the VPC. If it was already enabled, check the lambda logs for errors. Optionally, disable flow logs and rerun this test." + ) diff --git a/simtest/simtest/remediation_test.py b/simtest/simtest/remediation_test.py index ae3f6a65..dff00aa8 100644 --- a/simtest/simtest/remediation_test.py +++ b/simtest/simtest/remediation_test.py @@ -1,45 +1,47 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -from simtest.orchestrator import get_orchestrator, get_session -import sys import json +from typing import Any + +from layer.sechub_findings import Finding + +from simtest.boto_session import get_session +from simtest.orchestrator import get_orchestrator -sys.path.append('../source/LambdaLayers') -from sechub_findings import Finding class ControlTest: def __init__(self): - self.standard = '' - self.control = '' - self.description = '' + self.standard = "" + self.control = "" + self.description = "" self.test_json = {} self.orchestrator = get_orchestrator() - def load_json(self, finding_json_filename, wrap_it_in_findings = False): + def load_json(self, finding_json_filename, wrap_it_in_findings=False): self.test_json = read_remediation_json(finding_json_filename) if wrap_it_in_findings: self.test_json = wrap_in_findings(self.test_json) def create_finding(self): - self.finding = Finding(self.test_json['detail']['findings'][0]) + self.finding = Finding(self.test_json["detail"]["findings"][0]) return def print_heading(self, description): - print('=' * 80) - print(f'Simulate {self.finding.standard_shortname} {self.finding.standard_control} Findings\n') + print("=" * 80) + print( + f"Simulate {self.finding.standard_shortname} {self.finding.standard_control} Findings\n" + ) print(self.finding.title) - print('-' * len(self.finding.title) + '\n') - print(self.finding.description + '\n') - print(self.finding.remediation_url + '\n') - print('TEST\n----') - print(f'{description}\n') + print("-" * len(self.finding.title) + "\n") + print(self.finding.description + "\n") + print(self.finding.remediation_url + "\n") + print("TEST\n----") + print(f"{description}\n") return def print_verification_instructions(self, instructions): - - print('VERIFICATION\n------------') + print("VERIFICATION\n------------") if type(instructions) is list: - for line in instructions: print(line) else: @@ -47,7 +49,7 @@ def print_verification_instructions(self, 
instructions): print() def print_prep_instructions(self, instructions): - print('PREPARATION\n-----------') + print("PREPARATION\n-----------") if type(instructions) is list: for line in instructions: print(line) @@ -56,23 +58,26 @@ def print_prep_instructions(self, instructions): print() def run(self): - continue_answer = input('Press enter to run the test or Q to quit: ') - if continue_answer.lower() == 'q': - print('CANCELLED.') + continue_answer = input("Press enter to run the test or Q to quit: ") + if continue_answer.lower() == "q": + print("CANCELLED.") exit() self.orchestrator.invoke(self.test_json) + class RemediationTest: - def __init__(self, remediation, account, wrap_it_in_findings = False): + def __init__(self, remediation, account, wrap_it_in_findings=False): self.remediation = remediation self.test_json = read_remediation_json(self.remediation) if wrap_it_in_findings: self.test_json = wrap_in_findings(self.test_json) self.orchestrator = get_orchestrator() - self.test_json['detail']['findings'][0]['AwsAccountId'] = account - self.test_json['detail']['findings'][0]['Resources'][0]['Region'] = self.orchestrator.get_region() + self.test_json["detail"]["findings"][0]["AwsAccountId"] = account + self.test_json["detail"]["findings"][0]["Resources"][0][ + "Region" + ] = self.orchestrator.get_region() # If submitting a finding in the orchestrator account, we can substitute the config rule ID # Otherwise, the config rule ID must be substituted manually with the ID from the finding account @@ -81,22 +86,27 @@ def __init__(self, remediation, account, wrap_it_in_findings = False): def _substitute_config_rules(self): rule_store = ConfigRuleStore() - finding = self.test_json['detail']['findings'][0] + finding = self.test_json["detail"]["findings"][0] resource_index = 0 - type_key = f'RelatedAWSResources:{str(resource_index)}/type' - name_key = f'RelatedAWSResources:{str(resource_index)}/name' - while finding.get('ProductFields', {}).get(type_key, None): - if 
(finding['ProductFields'][type_key] == 'AWS::Config::ConfigRule'): - rule_name = finding['ProductFields'][name_key] - rule_name_prefix = rule_name[0:len(rule_name) - len(rule_name.split('-')[-1])] - finding['ProductFields'][name_key] = rule_store.get_rule_name_from_prefix(rule_name_prefix) + type_key = f"RelatedAWSResources:{str(resource_index)}/type" + name_key = f"RelatedAWSResources:{str(resource_index)}/name" + while finding.get("ProductFields", {}).get(type_key, None): + if finding["ProductFields"][type_key] == "AWS::Config::ConfigRule": + rule_name = finding["ProductFields"][name_key] + rule_name_prefix = rule_name[ + 0 : len(rule_name) - len(rule_name.split("-")[-1]) + ] + finding["ProductFields"][name_key] = ( + rule_store.get_rule_name_from_prefix(rule_name_prefix) + ) resource_index = resource_index + 1 - type_key = f'RelatedAWSResources:{str(resource_index)}/type' - name_key = f'RelatedAWSResources:{str(resource_index)}/name' + type_key = f"RelatedAWSResources:{str(resource_index)}/type" + name_key = f"RelatedAWSResources:{str(resource_index)}/name" def run(self): self.orchestrator.invoke(self.test_json) + class ConfigRuleStore: def __init__(self): self._session = get_session() @@ -104,24 +114,25 @@ def __init__(self): def get_rule_name_from_prefix(self, prefix): rules = self._list_rules() for rule in rules: - if rule['ConfigRuleName'].startswith(prefix): - return rule['ConfigRuleName'] + if rule["ConfigRuleName"].startswith(prefix): + return rule["ConfigRuleName"] return None # TODO can cache locally by profile, these don't change often def _list_rules(self): - config_client = self._session.client('config') + config_client = self._session.client("config") response = config_client.describe_config_rules() - rules = response['ConfigRules'] - token = response.get('NextToken', None) + rules = response["ConfigRules"] + token = response.get("NextToken", None) while token: - response = config_client.describe_config_rules(NextToken = token) - 
rules.extend(response['ConfigRules']) - token = response.get('NextToken', None) + response = config_client.describe_config_rules(NextToken=token) + rules.extend(response["ConfigRules"]) + token = response.get("NextToken", None) return rules + def wrap_in_findings(test_json): - wrapper = { + wrapper: Any = { "version": "0", "id": "609185ea-be02-2b86-4187-ce81f45b82a9", "detail-type": "Security Hub Findings - Custom Action", @@ -129,19 +140,18 @@ def wrap_in_findings(test_json): "account": "111111111111", "time": "2020-06-24T20:06:09Z", "region": "us-east-2", - "resources": [ - "arn:aws:securityhub:us-east-2:111111111111:action/custom/foo" - ], + "resources": ["arn:aws:securityhub:us-east-2:111111111111:action/custom/foo"], "detail": { "actionName": "foo", "actionDescription": "foo bar baz", - "findings": [] - } + "findings": [], + }, } - wrapper['detail']['findings'].append(test_json) + wrapper["detail"]["findings"].append(test_json) return wrapper + def read_remediation_json(remediation): sample_json = "./simdata/" + remediation + ".json" - fh = open(sample_json, mode='r') + fh = open(sample_json, mode="r") return json.loads(fh.read()) diff --git a/simtest/simulate.py b/simtest/simulate.py index a03bab52..c6197923 100644 --- a/simtest/simulate.py +++ b/simtest/simulate.py @@ -1,12 +1,14 @@ #!/usr/bin/env python3 # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -from simtest.controls import testIdByStandard -from simtest.orchestrator import create_orchestrator -from simtest.boto_session import create_session import argparse import os +from simtest.boto_session import create_session +from simtest.controls import testIdByStandard +from simtest.orchestrator import create_orchestrator + + def main(): args = create_argument_parser().parse_args() resolve_missing_args(args) @@ -16,72 +18,79 @@ def main(): initiate_remediation(args.standard, args.remediation, args.account, args.region) + def get_profile(): try: - return os.environ['AWS_PROFILE'] + return os.environ["AWS_PROFILE"] except Exception as e: print(e) usage() exit() + def usage(): - print('Run this after assuming role (using isengardcli) in the Security Hub Administor account. Use isengardcli credentials to get temporary credentials before running this script') + print( + "Run this after assuming role (using isengardcli) in the Security Hub Administor account. Use isengardcli credentials to get temporary credentials before running this script" + ) + def initiate_remediation(standard, control, account, region): if control in testIdByStandard[standard]: print("-" * 80) - print(f'Testing {standard} {control}') + print(f"Testing {standard} {control}") testIdByStandard[standard][control](account, region) - print('\nRemediation was initiated. Verify that it completed successfully.\n') + print("\nRemediation was initiated. Verify that it completed successfully.\n") else: print("Remediation invalid: " + control) + def resolve_missing_args(args): if not args.account: - args.account = os.getenv('sim_account', None) + args.account = os.getenv("sim_account", None) if not args.account: - args.account = input('Account ID to test? ') + args.account = input("Account ID to test? ") if not args.standard: - args.standard = input('Security Standard to test? ') + args.standard = input("Security Standard to test? 
") if not args.region: args.region = args.orch_region + def create_argument_parser(): argument_parser = argparse.ArgumentParser() argument_parser.add_argument( - '--region', - '-r', - required = True, - dest = 'orch_region', - help = 'Region where findings are to be sent.') + "--region", + "-r", + required=True, + dest="orch_region", + help="Region where findings are to be sent.", + ) argument_parser.add_argument( - '--standard', - '-s', - required = True, - help = 'Security Standard (cis, afsbp, or pci)') + "--standard", "-s", required=True, help="Security Standard (cis, afsbp, or pci)" + ) argument_parser.add_argument( - '--control', - '-c', - required = True, - dest = 'remediation', - help = 'Control to test. Ex. 2.9') + "--control", + "-c", + required=True, + dest="remediation", + help="Control to test. Ex. 2.9", + ) argument_parser.add_argument( - '--account', - '-a', - required = False, - help = 'Account to test') + "--account", "-a", required=False, help="Account to test" + ) argument_parser.add_argument( - '--finding-region', - '-f', - required = False, - dest = 'region', - help = 'Region in which finding is to be remediated. Defaults to --region') + "--finding-region", + "-f", + required=False, + dest="region", + help="Region in which finding is to be remediated. 
Defaults to --region", + ) return argument_parser -if (__name__ == '__main__'): + +if __name__ == "__main__": main() diff --git a/deployment/manifest.yaml b/solution-manifest.yaml similarity index 82% rename from deployment/manifest.yaml rename to solution-manifest.yaml index 3cbf575e..63a713d8 100644 --- a/deployment/manifest.yaml +++ b/solution-manifest.yaml @@ -1,6 +1,6 @@ id: SO0111 name: security-hub-automated-response-and-remediation -version: 2.0.1 +version: 2.1.0 cloudformation_templates: - template: aws-sharr-deploy.template main_template: true @@ -18,3 +18,7 @@ cloudformation_templates: - template: playbooks/PCI321Stack.template - template: playbooks/SCMemberStack.template - template: playbooks/SCStack.template + - template: playbooks/NIST80053MemberStack.template + - template: playbooks/NIST80053Stack.template +build_environment: + build_image: aws/codebuild/standard:7.0 \ No newline at end of file diff --git a/sonar-project.properties b/sonar-project.properties index ebb90b8a..db81d5f7 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -7,10 +7,11 @@ sonar.exclusions = \ **/test/**/*, \ source/jest.config.ts, \ source/**/*.test.ts, \ - source/coverage/**/* + source/coverage/**/*, \ + source/solution_deploy/cdk.out/*, \ sonar.tests = \ - source/LambdaLayers/test/, \ + source/layer/test/, \ source/Orchestrator/test/, \ source/playbooks/AFSBP/test/, \ source/playbooks/AFSBP/ssmdocs/scripts/test/, \ @@ -18,11 +19,10 @@ sonar.tests = \ source/playbooks/CIS140/test/, \ source/playbooks/common/test/, \ source/playbooks/NEWPLAYBOOK/test/, \ - source/playbooks/NEWPLAYBOOK/ssmdocs/scripts/test/, \ + source/playbooks/NIST80053/test/, \ source/playbooks/PCI321/test/, \ source/playbooks/PCI321/ssmdocs/scripts/test/, \ source/playbooks/SC/test/, \ - source/playbooks/SC/ssmdocs/scripts/test/, \ source/remediation_runbooks/scripts/test/, \ source/solution_deploy/source/test/, \ source/test/ diff --git a/source/.prettierignore 
b/source/.prettierignore new file mode 100644 index 00000000..849ddff3 --- /dev/null +++ b/source/.prettierignore @@ -0,0 +1 @@ +dist/ diff --git a/source/.prettierrc b/source/.prettierrc index 3f584f60..368c2f1a 100644 --- a/source/.prettierrc +++ b/source/.prettierrc @@ -1,4 +1,4 @@ { - "printWidth": 120, - "singleQuote": true + "printWidth": 120, + "singleQuote": true } diff --git a/source/LambdaLayers/awsapi_cached_client.py b/source/LambdaLayers/awsapi_cached_client.py deleted file mode 100644 index 79d35f9a..00000000 --- a/source/LambdaLayers/awsapi_cached_client.py +++ /dev/null @@ -1,120 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 -import os -import boto3 -from botocore.config import Config - -class AWSCachedClient: - """ - Maintains a hash of AWS API Client connections by region and service - """ - account = '' - region = '' - client = {} - solution_id = '' - solution_version = 'undefined' - - def __init__(self, region): - """ - Create a Boto3 Client object. Region is used for operations such - as retrieving account number, and as the default for get_connection. 
- """ - self.solution_id = os.getenv('SOLUTION_ID', 'SO0111') - self.solution_version = os.getenv('SOLUTION_VERSION', 'undefined') - self.region = region - self.boto_config = Config( - user_agent_extra=f'AwsSolution/{self.solution_id}/{self.solution_version}', - retries ={ - 'max_attempts': 10, - 'mode': 'standard' - } - ) - - self.account = self._get_local_account_id() - - def get_connection(self, service, region=None): - """Connect to AWS api""" - - if not region: - region = self.region - - if service not in self.client: - self.client[service] = {} - - if region not in self.client[service]: - self.client[service][region] = boto3.client(service, region_name=region, config=self.boto_config) - - return self.client[service][region] - - def _get_local_account_id(self): - """ - get local account info - """ - aws_account_id = self.get_connection('sts',self.region).get_caller_identity().get('Account') - return aws_account_id - -class MissingAssumedRole(Exception): - pass - -class BotoSession: - client_props = {} - resource_props = {} - STS = None - partition = None - session = None - target = None - role = None - - def create_session(self): - self.STS = None - # Local or remote? Who am I? 
- self.STS = boto3.client('sts', config=self.boto_config) - if not self.target: - self.target = self.STS.get_caller_identity()['Account'] - remote_account = self.STS.assume_role( - RoleArn='arn:' + self.partition + ':iam::' + self.target + ':role/' + self.role, - RoleSessionName="sechub_admin" - ) - self.session = boto3.session.Session( - aws_access_key_id=remote_account['Credentials']['AccessKeyId'], - aws_secret_access_key=remote_account['Credentials']['SecretAccessKey'], - aws_session_token=remote_account['Credentials']['SessionToken'] - ) - - boto3.setup_default_session() - - def __init__(self, account=None, role=None, partition=None): - """ - Create a session - account: None or the target account - """ - # Default partition to 'aws' - if not partition: - partition = 'aws' - self.target = account - if not role: - raise MissingAssumedRole - else: - self.role = role - self.session = None - self.partition = os.getenv('AWS_PARTITION', partition) - self.solution_id = os.getenv('SOLUTION_ID', 'SO0111') - self.solution_version = os.getenv('SOLUTION_VERSION', 'undefined') - self.boto_config = Config( - user_agent_extra=f'AwsSolution/{self.solution_id}/{self.solution_version}', - retries ={ - 'max_attempts': 10, - 'mode': 'standard' - } - ) - self.create_session() - - def client(self, name, **kwargs): - - self.client_props[name] = self.session.client(name, config=self.boto_config, **kwargs) - return self.client_props[name] - - def resource(self, name, **kwargs): - - self.resource_props[name] = self.session.resource(name, config=self.boto_config, **kwargs) - return self.resource_props[name] diff --git a/source/LambdaLayers/metrics.py b/source/LambdaLayers/metrics.py deleted file mode 100644 index 63efdc9e..00000000 --- a/source/LambdaLayers/metrics.py +++ /dev/null @@ -1,146 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
-# SPDX-License-Identifier: Apache-2.0 -import os -import json -import uuid -from urllib.request import Request, urlopen -from datetime import datetime -import boto3 -from botocore.exceptions import ClientError -import awsapi_cached_client - -class Metrics(object): - - event_type = '' - send_metrics_option = 'No' - solution_version = '' - solution_uuid = None - session = None - region = None - ssm_client = None - metrics_parameter_name = '/Solutions/SO0111/anonymous_metrics_uuid' - - def __init__(self, event_type): - self.session = boto3.session.Session() - self.region = self.session.region_name - - self.ssm_client = self.connect_to_ssm() - - if not self.send_anonymous_metrics_enabled(): - return - - self.event_type = event_type - - self.__get_solution_uuid() - - try: - solution_version_parm = '/Solutions/SO0111/version' - solution_version_from_ssm = self.ssm_client.get_parameter( - Name=solution_version_parm - ).get('Parameter').get('Value') - except ClientError as ex: - exception_type = ex.response['Error']['Code'] - if exception_type == 'ParameterNotFound': - solution_version_from_ssm = 'unknown' - else: - print(ex) - except Exception as e: - print(e) - raise - - self.solution_version = solution_version_from_ssm - - def send_anonymous_metrics_enabled(self): - is_enabled = False # default value - try: - ssm_parm = '/Solutions/SO0111/sendAnonymousMetrics' - send_anonymous_metrics_from_ssm = self.ssm_client.get_parameter( - Name=ssm_parm - ).get('Parameter').get('Value').lower() - - if send_anonymous_metrics_from_ssm != 'yes' and send_anonymous_metrics_from_ssm != 'no': - print(f'Unexpected value for {ssm_parm}: {send_anonymous_metrics_from_ssm}. 
Defaulting to "no"') - elif send_anonymous_metrics_from_ssm == 'yes': - is_enabled = True - - except Exception as e: - print(e) - - return is_enabled - - def connect_to_ssm(self): - try: - if not self.ssm_client: - new_ssm_client = awsapi_cached_client.AWSCachedClient(self.region).get_connection('ssm') - return new_ssm_client - except Exception as e: - print(f'Could not connect to ssm: {str(e)}') - - def __update_solution_uuid(self, new_uuid): - self.ssm_client.put_parameter( - Name=self.metrics_parameter_name, - Description='Unique Id for anonymous metrics collection', - Value=new_uuid, - Type='String' - ) - - def __get_solution_uuid(self): - try: - solution_uuid_from_ssm = self.ssm_client.get_parameter( - Name=self.metrics_parameter_name - ).get('Parameter').get('Value') - self.solution_uuid = solution_uuid_from_ssm - except ClientError as ex: - exception_type = ex.response['Error']['Code'] - if exception_type == 'ParameterNotFound': - self.solution_uuid = str(uuid.uuid4()) - self.__update_solution_uuid(self.solution_uuid) - else: - print(ex) - raise - except Exception as e: - print(e) - raise - - def get_metrics_from_finding(self, finding): - - try: - if finding is not None: - metrics_data = { - 'generator_id': finding.get('GeneratorId'), - 'type': finding.get('Title'), - 'productArn': finding.get('ProductArn'), - 'finding_triggered_by': self.event_type, - 'region': self.region - } - else: - metrics_data = {} - return metrics_data - except Exception as excep: - print(excep) - return {} - - def send_metrics(self, metrics_data): - - try: - if metrics_data is not None and self.send_anonymous_metrics_enabled(): - usage_data = { - 'Solution': 'SO0111', - 'UUID': self.solution_uuid, - 'TimeStamp': str(datetime.utcnow().isoformat()), - 'Data': metrics_data, - 'Version': self.solution_version - } - print(f'Sending metrics data {json.dumps(usage_data)}') - self.post_metrics_to_api(usage_data) - - else: - return - except Exception as excep: - print(excep) - - def 
post_metrics_to_api(self, request_data): - url = 'https://metrics.awssolutionsbuilder.com/generic' - req = Request(url, method='POST', data=bytes(json.dumps( - request_data), encoding='utf8'), headers={'Content-Type': 'application/json'}) - urlopen(req) # nosec diff --git a/source/LambdaLayers/test/test_api_cached_client.py b/source/LambdaLayers/test/test_api_cached_client.py deleted file mode 100644 index c0382972..00000000 --- a/source/LambdaLayers/test/test_api_cached_client.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 -from botocore.stub import Stubber, ANY -import pytest -from awsapi_cached_client import AWSCachedClient - - -def test_create_client(): - AWS = AWSCachedClient('us-east-1') - - AWS.get_connection('sns') # in us-east-1 - my_account = AWS.account - assert my_account - assert 'sns' in AWS.client - assert 'us-east-1' in AWS.client['sns'] - AWS.get_connection('ec2') - assert 'ec2' in AWS.client - assert 'us-east-1' in AWS.client['ec2'] - AWS.get_connection('iam','ap-northeast-1') - assert 'iam' in AWS.client - assert 'ap-northeast-1' in AWS.client['iam'] diff --git a/source/LambdaLayers/test/test_metrics.py b/source/LambdaLayers/test/test_metrics.py deleted file mode 100644 index a48c03d8..00000000 --- a/source/LambdaLayers/test/test_metrics.py +++ /dev/null @@ -1,284 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 -import os -import boto3 -from botocore.stub import Stubber -import pytest -from pytest_mock import mocker -from metrics import Metrics -from . 
import file_utilities as utils - -test_data = 'test/test_json_data/' -def get_region(): - return os.getenv('AWS_DEFAULT_REGION') - -mock_ssm_get_parameter_uuid = { - "Parameter": { - "Name": "/Solutions/SO0111/anonymous_metrics_uuid", - "Type": "String", - "Value": "11111111-1111-1111-1111-111111111111", - "Version": 1, - "LastModifiedDate": "2021-02-25T12:58:50.591000-05:00", - "ARN": f'arn:aws:ssm:{get_region()}:111111111111:parameter/Solutions/SO0111/anonymous_metrics_uuid', - "DataType": "text" - } -} -mock_ssm_get_parameter_version = { - "Parameter": { - "Name": "/Solutions/SO0111/solution_version", - "Type": "String", - "Value": "v1.2.0TEST", - "Version": 1, - "LastModifiedDate": "2021-02-25T12:58:50.591000-05:00", - "ARN": f'arn:aws:ssm:{get_region()}1:111111111111:parameter/Solutions/SO0111/solution_version', - "DataType": "text" - } -} - -mock_ssm_get_parameter_sendmetrics_yes = { - "Parameter": { - "Name": "/Solutions/SO0111/sendAnonymousMetrics", - "Type": "String", - "Value": "Yes", - "Version": 1, - "LastModifiedDate": "2021-02-25T12:58:50.591000-05:00", - "ARN": f'arn:aws:ssm:{get_region()}:111111111111:parameter/Solutions/SO0111/sendAnonymousMetrics', - "DataType": "text" - } -} - -mock_ssm_get_parameter_sendmetrics_no = { - "Parameter": { - "Name": "/Solutions/SO0111/sendAnonymousMetrics", - "Type": "String", - "Value": "No", - "Version": 1, - "LastModifiedDate": "2021-02-25T12:58:50.591000-05:00", - "ARN": f'arn:aws:ssm:{get_region()}:111111111111:parameter/Solutions/SO0111/sendAnonymousMetrics', - "DataType": "text" - } -} - -mock_ssm_get_parameter_sendmetrics_badvalue = { - "Parameter": { - "Name": "/Solutions/SO0111/sendAnonymousMetrics", - "Type": "String", - "Value": "slartibartfast", - "Version": 1, - "LastModifiedDate": "2021-02-25T12:58:50.591000-05:00", - "ARN": f'arn:aws:ssm:{get_region()}:111111111111:parameter/Solutions/SO0111/sendAnonymousMetrics', - "DataType": "text" - } -} - 
-#------------------------------------------------------------------------------ -# This test verifies that the metrics object is constructed correctly -#------------------------------------------------------------------------------ -def test_metrics_construction(mocker): - - ssmc = boto3.client('ssm', region_name = get_region()) - ssmc_s = Stubber(ssmc) - ssmc_s.add_response( - 'get_parameter', - mock_ssm_get_parameter_sendmetrics_yes - ) - ssmc_s.add_response( - 'get_parameter', - mock_ssm_get_parameter_uuid - ) - ssmc_s.add_response( - 'get_parameter', - mock_ssm_get_parameter_version - ) - ssmc_s.activate() - - mocker.patch('metrics.Metrics.connect_to_ssm', return_value=ssmc) - - metrics = Metrics("unit-test") - - assert metrics.solution_uuid == "11111111-1111-1111-1111-111111111111" - assert metrics.solution_version == "v1.2.0TEST" - -#------------------------------------------------------------------------------ -# This test verifies that event data is parsed correctly -#------------------------------------------------------------------------------ -def test_get_metrics_from_finding(mocker): - - expected_response = { - 'generator_id': 'arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/1.3', - 'type': '1.3 Ensure credentials unused for 90 days or greater are disabled', - 'productArn': 'arn:aws:securityhub:' + get_region() + '::product/aws/securityhub', - 'finding_triggered_by': 'unit-test', - 'region': mocker.ANY - } - - finding = utils.load_test_data(test_data + 'CIS-1.3.json', get_region()).get('detail').get('findings')[0] - - ssmc = boto3.client('ssm',region_name = get_region()) - ssmc_s = Stubber(ssmc) - ssmc_s.add_response( - 'get_parameter', - mock_ssm_get_parameter_sendmetrics_yes - ) - ssmc_s.add_response( - 'get_parameter', - mock_ssm_get_parameter_uuid - ) - ssmc_s.add_response( - 'get_parameter', - mock_ssm_get_parameter_version - ) - ssmc_s.activate() - - mocker.patch('metrics.Metrics.connect_to_ssm', return_value=ssmc) - - 
metrics = Metrics("unit-test") - - assert metrics.get_metrics_from_finding(finding) == expected_response - -#------------------------------------------------------------------------------ -# This test verifies that sendAnonymousMetrics defaults to no when the value is -# other than yes or no. -#------------------------------------------------------------------------------ -def test_validate_ambiguous_sendanonymousmetrics(mocker): - - ssmc = boto3.client('ssm', region_name = get_region()) - ssmc_s = Stubber(ssmc) - ssmc_s.add_response( - 'get_parameter', - mock_ssm_get_parameter_sendmetrics_badvalue - ) - ssmc_s.add_response( - 'get_parameter', - mock_ssm_get_parameter_uuid - ) - ssmc_s.add_response( - 'get_parameter', - mock_ssm_get_parameter_version - ) - ssmc_s.activate() - - mocker.patch('metrics.Metrics.connect_to_ssm', return_value=ssmc) - - metrics = Metrics("unit-test") - - assert metrics.send_anonymous_metrics_enabled() == False -#------------------------------------------------------------------------------ -# This test verifies that send_metrics will post metrics when enabled via ssm -#------------------------------------------------------------------------------ -def test_send_metrics(mocker): - - expected_response = { - 'Solution': 'SO0111', - 'UUID': '11111111-1111-1111-1111-111111111111', - 'TimeStamp': mocker.ANY, - 'Data': { - 'generator_id': 'arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/1.3', - 'type': '1.3 Ensure credentials unused for 90 days or greater are disabled', - 'productArn': mocker.ANY, - 'finding_triggered_by': 'unit-test', - 'region': mocker.ANY - }, - 'Version': 'v1.2.0TEST' - } - - finding = utils.load_test_data(test_data + 'CIS-1.3.json', get_region()).get('detail').get('findings')[0] - - ssmc = boto3.client('ssm',region_name=get_region()) - ssmc_s = Stubber(ssmc) - ssmc_s.add_response( - 'get_parameter', - mock_ssm_get_parameter_sendmetrics_yes - ) - ssmc_s.add_response( - 'get_parameter', - 
mock_ssm_get_parameter_uuid - ) - ssmc_s.add_response( - 'get_parameter', - mock_ssm_get_parameter_version - ) - ssmc_s.add_response( - 'get_parameter', - mock_ssm_get_parameter_sendmetrics_yes - ) - ssmc_s.activate() - - mocker.patch('metrics.Metrics.connect_to_ssm', return_value=ssmc) - - metrics = Metrics("unit-test") - metrics_data = metrics.get_metrics_from_finding(finding) - assert metrics_data == { - 'generator_id': 'arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/1.3', - 'type': '1.3 Ensure credentials unused for 90 days or greater are disabled', - 'productArn': f'arn:aws:securityhub:{get_region()}::product/aws/securityhub', - 'finding_triggered_by': 'unit-test', - 'region': get_region() - } - - send_metrics = mocker.patch('metrics.Metrics.post_metrics_to_api', return_value=None) - - metrics.send_metrics(metrics_data) - - send_metrics.assert_called_with(expected_response) - -#------------------------------------------------------------------------------ -# This test verifies that send_metrics takes the value from the SSM parameter -# WHEN METRICS ARE SENT. It does not assume that if the metrics object exists -# then send metrics is enabled. 
-#------------------------------------------------------------------------------ -def test_do_not_send_metrics(mocker): - - expected_response = { - 'Solution': 'SO0111', - 'UUID': '11111111-1111-1111-1111-111111111111', - 'TimeStamp': mocker.ANY, - 'Data': { - 'generator_id': 'arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/1.3', - 'type': '1.3 Ensure credentials unused for 90 days or greater are disabled', - 'productArn': mocker.ANY, - 'finding_triggered_by': 'unit-test', - 'region': mocker.ANY - }, - 'Version': 'v1.2.0TEST' - } - - finding = utils.load_test_data(test_data + 'CIS-1.3.json', get_region()).get('detail').get('findings')[0] - - ssmc = boto3.client('ssm',region_name=get_region()) - ssmc_s = Stubber(ssmc) - ssmc_s.add_response( - 'get_parameter', - mock_ssm_get_parameter_sendmetrics_yes - ) - ssmc_s.add_response( - 'get_parameter', - mock_ssm_get_parameter_uuid - ) - ssmc_s.add_response( - 'get_parameter', - mock_ssm_get_parameter_version - ) - ssmc_s.add_response( - 'get_parameter', - mock_ssm_get_parameter_sendmetrics_no - ) - ssmc_s.activate() - - mocker.patch('metrics.Metrics.connect_to_ssm', return_value=ssmc) - - metrics = Metrics("unit-test") - metrics_data = metrics.get_metrics_from_finding(finding) - assert metrics_data == { - 'generator_id': 'arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/1.3', - 'type': '1.3 Ensure credentials unused for 90 days or greater are disabled', - 'productArn': f'arn:aws:securityhub:{get_region()}::product/aws/securityhub', - 'finding_triggered_by': 'unit-test', - 'region': get_region() - } - - send_metrics = mocker.patch('metrics.Metrics.post_metrics_to_api', return_value=None) - - metrics.send_metrics(metrics_data) - - send_metrics.assert_not_called() diff --git a/source/LambdaLayers/test/test_sechub_findings.py b/source/LambdaLayers/test/test_sechub_findings.py deleted file mode 100644 index 85b0967c..00000000 --- a/source/LambdaLayers/test/test_sechub_findings.py 
+++ /dev/null @@ -1,283 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 -""" -Simple test to validate that the request format coming from the Cfn template -will turn into a valid API call. -""" -import json -import boto3 -from botocore.stub import Stubber -import pytest -from pytest_mock import mocker -import sechub_findings as findings -from logger import Logger -from applogger import LogHandler -import utils - -log_level = 'info' -logger = Logger(loglevel=log_level) -test_data = 'test/test_json_data/' - -my_session = boto3.session.Session() -my_region = my_session.region_name - - -#------------------------------------------------------------------------------ -# CIS v1.2.0 -#------------------------------------------------------------------------------ -def test_parse_cis_v120(mocker): - - test_data_in = open(test_data + 'CIS-1.3.json') - event = json.loads(test_data_in.read()) - test_data_in.close() - - ssmclient = boto3.client('ssm') - stubbed_ssm_client = Stubber(ssmclient) - stubbed_ssm_client.add_response( - 'get_parameter', - { - "Parameter": { - "Name": "/Solutions/SO0111/cis-aws-foundations-benchmark/1.2.0/shortname", - "Type": "String", - "Value": "CIS", - "Version": 1, - "LastModifiedDate": "2021-04-23T08:11:30.658000-04:00", - "ARN": f'arn:aws:ssm:{my_region}:111111111111:parameter/Solutions/SO0111/cis-aws-foundations-benchmark/1.2.0/shortname', - "DataType": "text" - } - }) - stubbed_ssm_client.add_client_error( - 'get_parameter','ParameterNotFound','The requested parameter does not exist' - ) - stubbed_ssm_client.add_response( - 'get_parameter', - { - "Parameter": { - "Name": "/Solutions/SO0111/cis-aws-foundations-benchmark/1.2.0", - "Type": "String", - "Value": "enabled", - "Version": 1, - "LastModifiedDate": "2021-04-23T08:12:13.893000-04:00", - "ARN": f'arn:aws:ssm:{my_region}:111111111111:parameter/Solutions/SO0111/cis-aws-foundations-benchmark/version', - "DataType": "text" - } 
- }) - stubbed_ssm_client.activate() - - mocker.patch('sechub_findings.get_ssm_connection', return_value=ssmclient) - - finding = findings.Finding(event['detail']['findings'][0]) - assert finding.details.get('Id') == event['detail']['findings'][0]['Id'] - assert finding.generator_id == 'arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/1.3' - assert finding.account_id == '111111111111' - assert finding.standard_name == 'cis-aws-foundations-benchmark' - assert finding.standard_shortname == 'CIS' - assert finding.standard_version == '1.2.0' - assert finding.standard_control == '1.3' - assert finding.standard_version_supported == 'True' - - stubbed_ssm_client.deactivate() - -#------------------------------------------------------------------------------ -# -#------------------------------------------------------------------------------ -def test_parse_bad_imported(): - test_file = open(test_data + 'CIS-bad.json') - event = json.loads(test_file.read()) - test_file.close() - - with pytest.raises(findings.InvalidFindingJson): - finding = findings.Finding(event['detail']['findings'][0]) - -#------------------------------------------------------------------------------ -# CIS v1.7.0 finding should show unsupported -#------------------------------------------------------------------------------ -def test_parse_unsupported_version(mocker): - test_data_in = open(test_data + 'CIS_unsupversion.json') - event = json.loads(test_data_in.read()) - test_data_in.close() - - ssmclient = boto3.client('ssm') - stubbed_ssm_client = Stubber(ssmclient) - - stubbed_ssm_client.add_response( - 'get_parameter', - { - "Parameter": { - "Name": "/Solutions/SO0111/cis-aws-foundations-benchmark/1.7.0/shortname", - "Type": "String", - "Value": "CIS", - "Version": 1, - "LastModifiedDate": "2021-04-23T08:11:30.658000-04:00", - "ARN": f'arn:aws:ssm:{my_region}:111111111111:parameter/Solutions/SO0111/cis-aws-foundations-benchmark/1.7.0/shortname', - "DataType": "text" - } - }) - - 
stubbed_ssm_client.add_client_error( - 'get_parameter','ParameterNotFound','The requested parameter does not exist' - ) - stubbed_ssm_client.activate() - - mocker.patch('sechub_findings.get_ssm_connection', return_value=ssmclient) - - finding = findings.Finding(event['detail']['findings'][0]) - - assert finding.details.get('Id') == event['detail']['findings'][0]['Id'] - assert finding.generator_id == 'arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.7.0/rule/1.6' - assert finding.account_id == '111111111111' - assert finding.standard_name == 'cis-aws-foundations-benchmark' - assert finding.standard_shortname == 'CIS' - assert finding.standard_version == '1.7.0' - assert finding.standard_control == '1.6' - assert finding.standard_version_supported == 'False' - - stubbed_ssm_client.deactivate() - -#------------------------------------------------------------------------------ -# AFSBP v1.0.0 -#------------------------------------------------------------------------------ -def test_parse_afsbp_v100(mocker): - - test_data_in = open(test_data + 'afsbp-ec2.7.json') - event = json.loads(test_data_in.read()) - test_data_in.close() - - ssmclient = boto3.client('ssm') - stubbed_ssm_client = Stubber(ssmclient) - - stubbed_ssm_client.add_response( - 'get_parameter', - { - "Parameter": { - "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname", - "Type": "String", - "Value": "AFSBP", - "Version": 1, - "LastModifiedDate": "2021-04-23T08:11:30.658000-04:00", - "ARN": f'arn:aws:ssm:{my_region}:111111111111:parameter/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname', - "DataType": "text" - } - }) - stubbed_ssm_client.add_client_error( - 'get_parameter','ParameterNotFound','The requested parameter does not exist' - ) - stubbed_ssm_client.add_response( - 'get_parameter', - { - "Parameter": { - "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0", - "Type": "String", - "Value": "enabled", - 
"Version": 1, - "LastModifiedDate": "2021-04-23T08:12:13.893000-04:00", - "ARN": f'arn:aws:ssm:us-{my_region}-1:111111111111:parameter/Solutions/SO0111/aws-foundational-security-best-practices/version', - "DataType": "text" - } - }) - stubbed_ssm_client.activate() - - mocker.patch('sechub_findings.get_ssm_connection', return_value=ssmclient) - - finding = findings.Finding(event['detail']['findings'][0]) - assert finding.details.get('Id') == event['detail']['findings'][0]['Id'] - assert finding.account_id == '111111111111' - assert finding.standard_name == 'aws-foundational-security-best-practices' - assert finding.standard_shortname == 'AFSBP' - assert finding.standard_version == '1.0.0' - assert finding.standard_control == 'EC2.7' - assert finding.standard_version_supported == 'True' - - stubbed_ssm_client.deactivate() - -#------------------------------------------------------------------------------ -# Security Standard not found -#------------------------------------------------------------------------------ -def test_undefined_security_standard(mocker): - - test_data_in = open(test_data + 'afsbp-ec2.7.json') - event = json.loads(test_data_in.read()) - test_data_in.close() - - event['detail']['findings'][0]['ProductFields']['StandardsControlArn'] = \ - "arn:aws:securityhub:::standards/aws-invalid-security-standard/v/1.2.3/ABC.1" - - ssmclient = boto3.client('ssm') - stubbed_ssm_client = Stubber(ssmclient) - - stubbed_ssm_client.add_client_error( - 'get_parameter','ParameterNotFound','The requested parameter does not exist' - ) - - stubbed_ssm_client.add_client_error( - 'get_parameter','ParameterNotFound','The requested parameter does not exist' - ) - - stubbed_ssm_client.add_client_error( - 'get_parameter','ParameterNotFound','The requested parameter does not exist' - ) - - stubbed_ssm_client.activate() - - mocker.patch('sechub_findings.get_ssm_connection', return_value=ssmclient) - - finding = findings.Finding(event['detail']['findings'][0]) - assert 
finding.details.get('Id') == event['detail']['findings'][0]['Id'] - assert finding.account_id == '111111111111' - assert finding.standard_name == 'aws-invalid-security-standard' - assert finding.standard_shortname == 'error' - assert finding.security_standard == 'notfound' - assert finding.standard_version == '1.2.3' - assert finding.standard_control == 'ABC.1' - assert finding.standard_version_supported == 'False' - - stubbed_ssm_client.deactivate() - -def test_security_control(mocker): - test_data_in = open(test_data + 'afsbp-ec2.7.json') - event = json.loads(test_data_in.read()) - test_data_in.close() - - event['detail']['findings'][0]['ProductFields']['StandardsControlArn'] = None - event['detail']['findings'][0]['Compliance']['SecurityControlId'] = 'EC2.7' - - ssmclient = boto3.client('ssm') - stubbed_ssm_client = Stubber(ssmclient) - - stubbed_ssm_client.add_response( - 'get_parameter', - { - "Parameter": { - "Name": "/Solutions/SO0111/security-controls/2.0.0/shortname", - "Type": "String", - "Value": "SC", - "Version": 1, - "LastModifiedDate": "2021-04-23T08:11:30.658000-04:00", - "ARN": f'arn:aws:ssm:{my_region}:111111111111:parameter/Solutions/SO0111/security-controls/2.0.0/shortname', - "DataType": "text" - } - }) - stubbed_ssm_client.add_client_error( - 'get_parameter','ParameterNotFound','The requested parameter does not exist' - ) - stubbed_ssm_client.add_response( - 'get_parameter', - { - "Parameter": { - "Name": "/Solutions/SO0111/security-controls/2.0.0/status", - "Type": "String", - "Value": "enabled", - "Version": 1, - "LastModifiedDate": "2021-04-23T08:12:13.893000-04:00", - "ARN": f'arn:aws:ssm:us-{my_region}-1:111111111111:parameter/Solutions/SO0111/security-controls/2.0.0/status', - "DataType": "text" - } - }) - stubbed_ssm_client.activate() - - finding = findings.Finding(event['detail']['findings'][0]) - assert finding.details.get('Id') == event['detail']['findings'][0]['Id'] - assert finding.account_id == '111111111111' - assert 
finding.standard_name == 'security-control' - assert finding.standard_version == '2.0.0' - assert finding.standard_control == 'EC2.7' diff --git a/source/LambdaLayers/test/test_utils.py b/source/LambdaLayers/test/test_utils.py deleted file mode 100644 index b88d3f08..00000000 --- a/source/LambdaLayers/test/test_utils.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 -from utils import resource_from_arn, partition_from_region - -def test_resource_from_arn(): - testarn1 = "arn:aws-us-gov:iam:us-gov-west-1:222222222222:root" - assert resource_from_arn(testarn1) == 'root' - testarn2 = "arn:aws-cn:s3:::doc-example-bucket" - assert resource_from_arn(testarn2) == 'doc-example-bucket' - testarn3 = "This is a non-arn string" - assert resource_from_arn(testarn3) == 'This is a non-arn string' - -def test_partition_from_region(): - assert partition_from_region('us-gov-west-1') == 'aws-us-gov' - assert partition_from_region('cn-north-1') == 'aws-cn' - # Note: does not validate region name. default expected - assert partition_from_region('foo') == 'aws' - assert partition_from_region('eu-west-1') == 'aws' diff --git a/source/LambdaLayers/utils.py b/source/LambdaLayers/utils.py deleted file mode 100644 index 98b59f5f..00000000 --- a/source/LambdaLayers/utils.py +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
-# SPDX-License-Identifier: Apache-2.0 -import json -import re -import os -import boto3 -from awsapi_cached_client import AWSCachedClient -from botocore.exceptions import UnknownRegionError - -AWS_REGION = os.getenv('AWS_REGION', 'us-east-1') - -properties = ['status','message','executionid', -'affected_object','remediation_status','logdata', -'securitystandard','securitystandardversion','standardsupported', -'controlid','accountid','automationdocid', -'remediationrole','workflowdoc','workflowaccount', -'workflowrole','eventtype','resourceregion','workflow_data','executionaccount', 'executionregion'] - -class StepFunctionLambdaAnswer: - """ - Maintains a hash of AWS API Client connections by region and service - """ - status = 'init' - message = '' - executionid = '' - affected_object = '' - remediation_status = '' - logdata = [] - securitystandard = '' - securitystandardversion = '' - standardsupported = '' - controlid = '' - accountid = '' - automationdocid = '' - remediationrole = '' - workflowdoc = '' - workflowaccount = '' - eventtype = '' - resourceregion = '' - workflow_data = {} # Hash for workflow data so that it can be modified in - # in the future without changing the source code - - def __init__(self): - """Set message and status - minimum required fields""" - self.status = '' - self.message = '' - self.remediation_status = '' - self.logdata = [] - - def __str__(self): - return json.dumps(self.__dict__) - - def json(self): - return self.__dict__ - - def update(self, answer_data): - for property, value in answer_data.items(): - if property in properties: - setattr(self, property, value) - -def resource_from_arn(arn): - """ - Strip off the leading parts of the ARN: arn:*:*:*:*: - Return what's left. If no match, return the original predicate. 
- """ - arn_pattern = re.compile(r'arn\:[\w,-]+:[\w,-]+:.*:\d*:(.*)') - arn_match = arn_pattern.match(arn) - answer = arn - if arn_match: - answer = arn_match.group(1) - return answer - -def partition_from_region(region_name): - """ - returns the partition for a given region - On success returns a string - On failure returns aws - """ - partition = '' - session = boto3.Session() - try: - partition = session.get_partition_for_region(region_name) - except UnknownRegionError: - return 'aws' - - return partition - -def publish_to_sns(topic_name, message, region=''): - """ - Post a message to an SNS topic - """ - if not region: - region = AWS_REGION - partition = partition_from_region(region) - AWS = AWSCachedClient(region) # cached client object - account = boto3.client('sts').get_caller_identity()['Account'] - - topic_arn = f'arn:{partition}:sns:{region}:{account}:{topic_name}' - - message_id = AWS.get_connection('sns', region).publish( - TopicArn=topic_arn, - Message=message - ).get('MessageId', 'error') - - return message_id diff --git a/source/Orchestrator/check_ssm_doc_state.py b/source/Orchestrator/check_ssm_doc_state.py index cfe11df0..8086342e 100644 --- a/source/Orchestrator/check_ssm_doc_state.py +++ b/source/Orchestrator/check_ssm_doc_state.py @@ -1,145 +1,189 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import json -import boto3 import os -from botocore.config import Config + +import boto3 from botocore.exceptions import ClientError -from logger import Logger -from awsapi_cached_client import BotoSession -from sechub_findings import Finding -import utils +from layer import utils +from layer.awsapi_cached_client import BotoSession +from layer.cloudwatch_metrics import CloudWatchMetrics +from layer.logger import Logger +from layer.sechub_findings import Finding -ORCH_ROLE_NAME = 'SO0111-SHARR-Orchestrator-Member' # role to use for cross-account +ORCH_ROLE_NAME = "SO0111-SHARR-Orchestrator-Member" # role to use for cross-account # initialise loggers -LOG_LEVEL = os.getenv('log_level', 'info') +LOG_LEVEL = os.getenv("log_level", "info") LOGGER = Logger(loglevel=LOG_LEVEL) +session = boto3.session.Session() +AWS_REGION = session.region_name -def _get_ssm_client(account, role, region=''): + +def _get_ssm_client(account, role, region=""): """ Create a client for ssm """ kwargs = {} if region: - kwargs['region_name'] = region + kwargs["region_name"] = region + + return BotoSession(account, f"{role}").client("ssm", **kwargs) - return BotoSession( - account, - f'{role}' - ).client('ssm', **kwargs) def _add_doc_state_to_answer(doc, account, region, answer): - # Connect to APIs - ssm = _get_ssm_client( - account, - ORCH_ROLE_NAME, - region - ) - # Validate input try: - docinfo = ssm.describe_document( - Name=doc - )['Document'] + # Connect to APIs + ssm = _get_ssm_client(account, ORCH_ROLE_NAME, region) - doctype = docinfo.get('DocumentType', 'unknown') + # Validate input + docinfo = ssm.describe_document(Name=doc)["Document"] + + doctype = docinfo.get("DocumentType", "unknown") if doctype != "Automation": - answer.update({ - 'status':'ERROR', - 'message':'Document Type is not "Automation": ' + str(doctype) - }) + answer.update( + { + "status": "ERROR", + "message": 'Document Type is not "Automation": ' + str(doctype), + } + ) 
LOGGER.error(answer.message) - docstate = docinfo.get('Status', 'unknown') + docstate = docinfo.get("Status", "unknown") if docstate != "Active": - answer.update({ - 'status':'NOTACTIVE', - 'message':'Document Status is not "Active": ' + str(docstate) - }) + answer.update( + { + "status": "NOTACTIVE", + "message": 'Document Status is not "Active": ' + str(docstate), + } + ) LOGGER.error(answer.message) - answer.update({ - 'status':'ACTIVE' - }) + answer.update({"status": "ACTIVE"}) except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] if exception_type in "InvalidDocument": - answer.update({ - 'status':'NOTFOUND', - 'message': f'Document {doc} does not exist.' - }) + answer.update( + {"status": "NOTFOUND", "message": f"Document {doc} does not exist."} + ) + LOGGER.error(answer.message) + elif exception_type == "AccessDenied": + answer.update( + { + "status": "ACCESSDENIED", + "message": f"Could not assume role for {doc} in {account} in {region}", + } + ) LOGGER.error(answer.message) + try: + cloudwatch_metrics = CloudWatchMetrics() + cloudwatch_metric = { + "MetricName": "AssumeRoleFailure", + "Unit": "Count", + "Value": 1, + } + cloudwatch_metrics.send_metric(cloudwatch_metric) + except Exception: + LOGGER.debug("Did not send Cloudwatch metric") else: - answer.update({ - 'status':'CLIENTERROR', - 'message':'An unhandled client error occurred: ' + exception_type - }) + answer.update( + { + "status": "CLIENTERROR", + "message": "An unhandled client error occurred: " + exception_type, + } + ) LOGGER.error(answer.message) except Exception as e: - answer.update({ - 'status':'ERROR', - 'message':'An unhandled error occurred: ' + str(e) - }) + answer.update( + {"status": "ERROR", "message": "An unhandled error occurred: " + str(e)} + ) LOGGER.error(answer.message) -def lambda_handler(event, _): - answer = utils.StepFunctionLambdaAnswer() # holds the response to the step function +def 
lambda_handler(event, _): + answer = utils.StepFunctionLambdaAnswer() # holds the response to the step function LOGGER.info(event) - if "Finding" not in event or \ - "EventType" not in event: - answer.update({ - 'status':'ERROR', - 'message':'Missing required data in request' - }) + if "Finding" not in event or "EventType" not in event: + answer.update( + {"status": "ERROR", "message": "Missing required data in request"} + ) LOGGER.error(answer.message) return answer.json() - finding = Finding(event['Finding']) - - answer.update({ - 'securitystandard': finding.standard_shortname, - 'securitystandardversion': finding.standard_version, - 'controlid': finding.standard_control, - 'standardsupported': finding.standard_version_supported, - 'accountid': finding.account_id, - 'resourceregion': finding.resource_region - }) - - if finding.standard_version_supported != 'True': - answer.update({ - 'status':'NOTENABLED', - 'message':f'Security Standard is not enabled": "{finding.standard_name} version {finding.standard_version}"' - }) + product_name = ( + event["Finding"] + .get("ProductFields", {}) + .get("aws/securityhub/ProductName", "Security Hub") + ) + + if product_name != "Security Hub": + workflow_doc = event.get("Workflow", {}) + non_sec_hub_finding = event["Finding"] + non_sec_hub_resources = non_sec_hub_finding.get("Resources", []) + resource_region = AWS_REGION + if len(non_sec_hub_resources) >= 1: + resource_region = non_sec_hub_resources[0].get("Region", "") + answer.update( + { + "securitystandard": "N/A", + "securitystandardversion": "N/A", + "controlid": "N/A", + "standardsupported": "N/A", + "accountid": non_sec_hub_finding["AwsAccountId"], + "resourceregion": resource_region, + "automationdocid": workflow_doc["WorkflowDocument"], + "remediationrole": ( + workflow_doc["WorkflowRole"] + if workflow_doc["WorkflowRole"] != "" + else "SO0111-UseDefaultRole" + ), + } + ) + answer.update({"status": "ACTIVE"}) + return answer.json() + + finding = 
Finding(event["Finding"]) + + answer.update( + { + "securitystandard": finding.standard_shortname, + "securitystandardversion": finding.standard_version, + "controlid": finding.standard_control, + "standardsupported": finding.standard_version_supported, + "accountid": finding.account_id, + "resourceregion": finding.resource_region, + } + ) + + if finding.standard_version_supported != "True": + answer.update( + { + "status": "NOTENABLED", + "message": f'Security Standard is not enabled": "{finding.standard_name} version {finding.standard_version}"', + } + ) return answer.json() # Is there alt workflow configuration? - alt_workflow_doc = event.get('Workflow',{}).get('WorkflowDocument', None) + alt_workflow_doc = event.get("Workflow", {}).get("WorkflowDocument", None) - automation_docid = f'ASR-{finding.standard_shortname}_{finding.standard_version}_{finding.remediation_control}' - remediation_role = f'SO0111-Remediate-{finding.standard_shortname}-{finding.standard_version}-{finding.remediation_control}' + automation_docid = f"ASR-{finding.standard_shortname}_{finding.standard_version}_{finding.remediation_control}" + remediation_role = f"SO0111-Remediate-{finding.standard_shortname}-{finding.standard_version}-{finding.remediation_control}" - answer.update({ - 'automationdocid': automation_docid, - 'remediationrole': remediation_role - }) + answer.update( + {"automationdocid": automation_docid, "remediationrole": remediation_role} + ) # If alt workflow is configured we don't need to check doc state, as we checked # it in get_approval_requirement if alt_workflow_doc: - answer.update({ - 'status': 'ACTIVE' - }) + answer.update({"status": "ACTIVE"}) else: _add_doc_state_to_answer( - automation_docid, - finding.account_id, - finding.resource_region, - answer + automation_docid, finding.account_id, finding.resource_region, answer ) return answer.json() diff --git a/source/Orchestrator/check_ssm_execution.py b/source/Orchestrator/check_ssm_execution.py index 
b44ecb4b..bd950484 100644 --- a/source/Orchestrator/check_ssm_execution.py +++ b/source/Orchestrator/check_ssm_execution.py @@ -1,157 +1,163 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 import json +import os import re from json.decoder import JSONDecodeError -import boto3 -import os -from botocore.config import Config -from logger import Logger -from awsapi_cached_client import BotoSession -from sechub_findings import Finding -import utils -from metrics import Metrics +from typing import TYPE_CHECKING, Any, Optional + +from layer import utils +from layer.awsapi_cached_client import BotoSession +from layer.logger import Logger +from layer.metrics import Metrics + +if TYPE_CHECKING: + from mypy_boto3_ssm.client import SSMClient +else: + SSMClient = object -ORCH_ROLE_NAME = 'SO0111-SHARR-Orchestrator-Member' # role to use for cross-account +ORCH_ROLE_NAME = "SO0111-SHARR-Orchestrator-Member" # role to use for cross-account # initialise loggers -LOG_LEVEL = os.getenv('log_level', 'info') +LOG_LEVEL = os.getenv("log_level", "info") LOGGER = Logger(loglevel=LOG_LEVEL) -def _get_ssm_client(account, role, region=''): + +def _get_ssm_client(account: str, role: str, region: str = "") -> SSMClient: """ Create a client for ssm """ kwargs = {} if region: - kwargs['region_name'] = region + kwargs["region_name"] = region + + ssm: SSMClient = BotoSession(account, f"{role}").client("ssm", **kwargs) + return ssm - return BotoSession( - account, - f'{role}' - ).client('ssm', **kwargs) class ParameterError(Exception): - error = 'Invalid parameter input' - def __init__(self, error=''): + error = "Invalid parameter input" + + def __init__(self, error=""): if error: self.error = error super().__init__(self.error) def __str__(self): - return f'{self.error}' + return f"{self.error}" + class AutomationExecution(object): status = None - outputs = {} + outputs: Any = {} failure_message = None - exec_id = None + exec_id: 
Optional[str] = None account = None role_base_name = None - region = None # Region where the ssm doc is running - _ssm_client = None + region = None # Region where the ssm doc is running def __init__(self, exec_id, account, role_base_name, region): - if not re.match('^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$', exec_id): - raise ParameterError(f'Invalid Automation Execution Id: {exec_id}') + if not re.match( + "^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$", exec_id + ): + raise ParameterError(f"Invalid Automation Execution Id: {exec_id}") self.exec_id = exec_id - if not re.match(r'^\d{12}$', account): - raise ParameterError(f'Invalid Value for Account: {account}') + if not re.match(r"^\d{12}$", account): + raise ParameterError(f"Invalid Value for Account: {account}") self.account = account - if not re.match(r'^[a-z]{2}(?:-gov)?-[a-z]+-\d$', region): - raise ParameterError(f'Invalid Value for Region: {region}') + if not re.match(r"^[a-z]{2}(?:-gov)?-[a-z]+-\d$", region): + raise ParameterError(f"Invalid Value for Region: {region}") self.region = region - if not re.match('^[a-zA-Z0-9_+=,.@-]{1,64}$', role_base_name): - raise ParameterError(f'Invalid Value for Role_Base_Name: {role_base_name}') + if not re.match("^[a-zA-Z0-9_+=,.@-]{1,64}$", role_base_name): + raise ParameterError(f"Invalid Value for Role_Base_Name: {role_base_name}") self._ssm_client = _get_ssm_client(self.account, role_base_name, self.region) self.get_execution_state() def get_execution_state(self): automation_exec_info = self._ssm_client.describe_automation_executions( - Filters=[ - { - 'Key': 'ExecutionId', - 'Values': [ - self.exec_id - ] - } - ] + Filters=[{"Key": "ExecutionId", "Values": [self.exec_id]}] # type: ignore[list-item] ) - self.status = automation_exec_info.get( - "AutomationExecutionMetadataList" - )[0].get( - "AutomationExecutionStatus", - "ERROR" + self.status = automation_exec_info["AutomationExecutionMetadataList"][0].get( + 
"AutomationExecutionStatus", "ERROR" ) - self.outputs = automation_exec_info.get( - "AutomationExecutionMetadataList" - )[0].get( - "Outputs", - {} + self.outputs = automation_exec_info["AutomationExecutionMetadataList"][0].get( + "Outputs", {} ) - remediation_output_name = 'Remediation.Output' - if remediation_output_name in self.outputs and \ - isinstance(self.outputs[remediation_output_name], list) and \ - len(self.outputs[remediation_output_name]) == 1 and \ - self.outputs[remediation_output_name][0] == "No output available yet because the step is not successfully executed": - self.outputs[remediation_output_name][0] = "See Automation Execution output for details" - - self.failure_message = automation_exec_info.get( - "AutomationExecutionMetadataList" - )[0].get( - "FailureMessage", - "" - ) + remediation_output_name = "Remediation.Output" + if ( + remediation_output_name in self.outputs + and isinstance(self.outputs[remediation_output_name], list) + and len(self.outputs[remediation_output_name]) == 1 + and self.outputs[remediation_output_name][0] + == "No output available yet because the step is not successfully executed" + ): + self.outputs[remediation_output_name][ + 0 + ] = "See Automation Execution output for details" + + self.failure_message = automation_exec_info["AutomationExecutionMetadataList"][ + 0 + ].get("FailureMessage", "") + def valid_automation_doc(automation_doc): - return "SecurityStandard" in automation_doc and \ - "ControlId" in automation_doc and \ - "AccountId" in automation_doc + return ( + "SecurityStandard" in automation_doc + and "ControlId" in automation_doc + and "AccountId" in automation_doc + ) + def get_execution_log(response_data): logdata = [] - if 'ExecutionLog' in response_data: - logdata = response_data['ExecutionLog'].split('\n') + if "ExecutionLog" in response_data: + logdata = response_data["ExecutionLog"].split("\n") return logdata + def get_affected_object(response_data): - affected_object_out = 'UNKNOWN' + 
affected_object_out = "UNKNOWN" if "ParseInput.AffectedObject" in response_data: - affected_object = response_data.get('ParseInput.AffectedObject')[0] + affected_object = response_data.get("ParseInput.AffectedObject")[0] try: affected_object = json.loads(affected_object) - if 'Type' in affected_object and 'Id' in affected_object: - affected_object_out = affected_object['Type'] + ' ' + affected_object['Id'] + if "Type" in affected_object and "Id" in affected_object: + affected_object_out = ( + affected_object["Type"] + " " + affected_object["Id"] + ) else: affected_object_out = str(affected_object) except JSONDecodeError: - print('Expected serialized json, got ' + str(affected_object)) + print("Expected serialized json, got " + str(affected_object)) affected_object_out = str(affected_object) return affected_object_out + def get_remediation_status(response_data, exec_status): status = exec_status - if 'Payload' in response_data and 'response' in response_data['Payload']: - status = response_data['Payload']['response'].get('status', 'UNKNOWN') - elif 'status' in response_data: - status = response_data['status'] + if "Payload" in response_data and "response" in response_data["Payload"]: + status = response_data["Payload"]["response"].get("status", "UNKNOWN") + elif "status" in response_data: + status = response_data["status"] return status + def get_remediation_message(response_data, remediation_status): - message = f'Remediation status: {remediation_status} - please verify remediation' - if 'Payload' in response_data and 'response' in response_data['Payload']: - message = response_data['Payload']['response'].get('status', 'UNKNOWN') - elif 'message' in response_data: - message = response_data['message'] + message = f"Remediation status: {remediation_status} - please verify remediation" + if "Payload" in response_data and "response" in response_data["Payload"]: + message = response_data["Payload"]["response"].get("status", "UNKNOWN") + elif "message" in response_data: 
+ message = response_data["message"] return message + def get_remediation_response(remediation_response_raw): # Remediation.Response is a list, if present. Only the first item should exist. remediation_response = {} @@ -162,39 +168,45 @@ def get_remediation_response(remediation_response_raw): remediation_response = {"message": remediation_response_raw[0]} except Exception as e: print(e) - print('Unhandled error') + print("Unhandled error") elif isinstance(remediation_response_raw, str): - remediation_response = { "message": remediation_response_raw} + remediation_response = {"message": remediation_response_raw} return remediation_response + def lambda_handler(event, _): answer = utils.StepFunctionLambdaAnswer() - automation_doc = event['AutomationDocument'] + automation_doc = event["AutomationDocument"] if not valid_automation_doc(automation_doc): - answer.update({ - 'status':'ERROR', - 'message':'Missing AutomationDocument data in request: ' + json.dumps(automation_doc) - }) + answer.update( + { + "status": "ERROR", + "message": "Missing AutomationDocument data in request: " + + json.dumps(automation_doc), + } + ) LOGGER.error(answer.message) return answer.json() - SSM_EXEC_ID = event['SSMExecution']['ExecId'] - SSM_ACCOUNT = event['SSMExecution'].get('Account') - SSM_REGION = event['SSMExecution'].get('Region') + SSM_EXEC_ID = event["SSMExecution"]["ExecId"] + SSM_ACCOUNT = event["SSMExecution"].get("Account") + SSM_REGION = event["SSMExecution"].get("Region") - if not all([SSM_ACCOUNT,SSM_REGION]): - exit('ERROR: missing remediation account information. SSMExecution missing region or account.') + if not all([SSM_ACCOUNT, SSM_REGION]): + exit( + "ERROR: missing remediation account information. SSMExecution missing region or account." 
+ ) - metrics_obj = Metrics( - event['EventType'] - ) - metrics_data = metrics_obj.get_metrics_from_finding(event['Finding']) + metrics_obj = Metrics(event["EventType"]) + metrics_data = metrics_obj.get_metrics_from_finding(event["Finding"]) try: - automation_exec_info = AutomationExecution(SSM_EXEC_ID, SSM_ACCOUNT, ORCH_ROLE_NAME, SSM_REGION) + automation_exec_info = AutomationExecution( + SSM_EXEC_ID, SSM_ACCOUNT, ORCH_ROLE_NAME, SSM_REGION + ) except Exception as e: - LOGGER.error(f'Unable to retrieve AutomationExecution data: {str(e)}') + LOGGER.error(f"Unable to retrieve AutomationExecution data: {str(e)}") raise e # Terminal states - get log data from AutomationExecutionMetadataList @@ -211,28 +223,38 @@ def lambda_handler(event, _): # VerifyRemediation.Output or Remediation.Output may be a string, when using a child runbook for # remediation. - if automation_exec_info.status in ('Success', 'TimedOut', 'Cancelled', 'Cancelling', 'Failed'): + if automation_exec_info.status in ( + "Success", + "TimedOut", + "Cancelled", + "Cancelling", + "Failed", + ): ssm_outputs = automation_exec_info.outputs affected_object = get_affected_object(ssm_outputs) remediation_response_raw = None - remediation_output_name = 'Remediation.Output' + remediation_output_name = "Remediation.Output" if remediation_output_name in ssm_outputs: remediation_response_raw = ssm_outputs[remediation_output_name] - elif 'VerifyRemediation.Output' in ssm_outputs: - remediation_response_raw = ssm_outputs['VerifyRemediation.Output'] + elif "VerifyRemediation.Output" in ssm_outputs: + remediation_response_raw = ssm_outputs["VerifyRemediation.Output"] else: remediation_response_raw = json.dumps(ssm_outputs) remediation_response = get_remediation_response(remediation_response_raw) status_for_message = automation_exec_info.status - if automation_exec_info.status == 'Success': - remediation_status = get_remediation_status(remediation_response, automation_exec_info.status) + if automation_exec_info.status 
== "Success": + remediation_status = get_remediation_status( + remediation_response, automation_exec_info.status + ) status_for_message = remediation_status - print(f'Remediation Status: {remediation_status}') + print(f"Remediation Status: {remediation_status}") - remediation_message = get_remediation_message(remediation_response, status_for_message) + remediation_message = get_remediation_message( + remediation_response, status_for_message + ) remediation_logdata = get_execution_log(remediation_response) @@ -240,31 +262,34 @@ def lambda_handler(event, _): if automation_exec_info.failure_message: remediation_logdata.append(automation_exec_info.failure_message) - answer.update({ - 'status': automation_exec_info.status, - 'remediation_status': status_for_message, - 'message': remediation_message, - 'executionid': SSM_EXEC_ID, - 'affected_object': affected_object, - 'logdata': json.dumps(remediation_logdata, default=str) - }) + answer.update( + { + "status": automation_exec_info.status, + "remediation_status": status_for_message, + "message": remediation_message, + "executionid": SSM_EXEC_ID, + "affected_object": affected_object, + "logdata": json.dumps(remediation_logdata, default=str), + } + ) try: - metrics_data['status'] = status_for_message + metrics_data["status"] = status_for_message metrics_obj.send_metrics(metrics_data) except Exception as e: LOGGER.error(e) - LOGGER.error('Failed to send metrics') + LOGGER.error("Failed to send metrics") else: - - answer.update({ - 'status': automation_exec_info.status, - 'remediation_status': 'running', - 'message': 'Waiting for completion', - 'executionid': SSM_EXEC_ID, - 'affected_object': '', - 'logdata': [] - }) + answer.update( + { + "status": automation_exec_info.status, + "remediation_status": "running", + "message": "Waiting for completion", + "executionid": SSM_EXEC_ID, + "affected_object": "", + "logdata": [], + } + ) return answer.json() diff --git a/source/Orchestrator/exec_ssm_doc.py 
b/source/Orchestrator/exec_ssm_doc.py index ea85e4f5..3bf8cbd4 100644 --- a/source/Orchestrator/exec_ssm_doc.py +++ b/source/Orchestrator/exec_ssm_doc.py @@ -3,64 +3,55 @@ import json import os import re -import boto3 + from botocore.exceptions import ClientError -from logger import Logger -from awsapi_cached_client import BotoSession -from applogger import LogHandler -from sechub_findings import Finding, SHARRNotification -import utils +from layer import utils +from layer.awsapi_cached_client import BotoSession +from layer.logger import Logger -AWS_PARTITION = os.getenv('AWS_PARTITION', 'aws') -AWS_REGION = os.getenv('AWS_REGION', 'aws') -SOLUTION_ID = os.getenv('SOLUTION_ID', 'SO0111') -SOLUTION_ID = re.sub(r'^DEV-', '', SOLUTION_ID) +AWS_PARTITION = os.getenv("AWS_PARTITION") +AWS_REGION = os.getenv("AWS_REGION") +SOLUTION_ID = os.getenv("SOLUTION_ID", "SO0111") +SOLUTION_ID = re.sub(r"^DEV-", "", SOLUTION_ID) # initialise loggers -LOG_LEVEL = os.getenv('log_level', 'info') +LOG_LEVEL = os.getenv("log_level", "info") LOGGER = Logger(loglevel=LOG_LEVEL) -def _get_ssm_client(account, role, region=''): + +def _get_ssm_client(account, role, region=""): """ Create a client for ssm """ kwargs = {} if region: - kwargs['region_name'] = region + kwargs["region_name"] = region + + return BotoSession(account, f"{role}").client("ssm", **kwargs) - return BotoSession( - account, - f'{role}' - ).client('ssm', **kwargs) def _get_iam_client(accountid, role): """ Create a client for iam """ - return BotoSession( - accountid, - role - ).client('iam') + return BotoSession(accountid, role).client("iam") + def lambda_role_exists(account, rolename): - iam = _get_iam_client( - account, - SOLUTION_ID + '-SHARR-Orchestrator-Member' - ) + iam = _get_iam_client(account, SOLUTION_ID + "-SHARR-Orchestrator-Member") try: - iam.get_role( - RoleName=rolename - ) + iam.get_role(RoleName=rolename) return True except ClientError as ex: - exception_type = ex.response['Error']['Code'] + 
exception_type = ex.response["Error"]["Code"] if exception_type in "NoSuchEntity": return False else: - exit('An unhandled client error occurred: ' + exception_type) + exit("An unhandled client error occurred: " + exception_type) except Exception as e: - exit('An unhandled error occurred: ' + str(e)) + exit("An unhandled error occurred: " + str(e)) + def lambda_handler(event, _): # Expected: @@ -81,32 +72,43 @@ def lambda_handler(event, _): # } answer = utils.StepFunctionLambdaAnswer() LOGGER.info(event) - if "Finding" not in event or \ - "EventType" not in event: - answer.update({ - 'status':'ERROR', - 'message':'Missing required data in request' - }) + if "Finding" not in event or "EventType" not in event: + answer.update( + {"status": "ERROR", "message": "Missing required data in request"} + ) LOGGER.error(answer.message) return answer.json() - automation_doc = event['AutomationDocument'] - alt_workflow_doc = event.get('Workflow',{}).get('WorkflowDocument', None) - alt_workflow_account = event.get('Workflow',{}).get('WorkflowAccount', None) - alt_workflow_role = event.get('Workflow',{}).get('WorkflowRole', None) + automation_doc = event["AutomationDocument"] + alt_workflow_doc = event.get("Workflow", {}).get("WorkflowDocument", None) + alt_workflow_account = event.get("Workflow", {}).get("WorkflowAccount", None) + alt_workflow_role = event.get("Workflow", {}).get("WorkflowRole", None) - remote_workflow_doc = alt_workflow_doc if alt_workflow_doc else event['AutomationDocument']['AutomationDocId'] + remote_workflow_doc = ( + alt_workflow_doc + if alt_workflow_doc + else event["AutomationDocument"]["AutomationDocId"] + ) - execution_account = alt_workflow_account if alt_workflow_account else automation_doc['AccountId'] - execution_region = AWS_REGION if alt_workflow_account else automation_doc.get('ResourceRegion', '') + execution_account = ( + alt_workflow_account if alt_workflow_account else automation_doc["AccountId"] + ) + execution_region = ( + AWS_REGION if 
alt_workflow_account else automation_doc.get("ResourceRegion", "") + ) - if "SecurityStandard" not in automation_doc or \ - "ControlId" not in automation_doc or \ - "AccountId" not in automation_doc: - answer.update({ - 'status':'ERROR', - 'message':'Missing AutomationDocument data in request: ' + json.dumps(automation_doc) - }) + if ( + "SecurityStandard" not in automation_doc + or "ControlId" not in automation_doc + or "AccountId" not in automation_doc + ): + answer.update( + { + "status": "ERROR", + "message": "Missing AutomationDocument data in request: " + + json.dumps(automation_doc), + } + ) LOGGER.error(answer.message) return answer.json() @@ -117,44 +119,56 @@ def lambda_handler(event, _): # # In most cases the Orchestrator Member role is used, and it passes # the value in RemediationRole as the AutomationExectutionRole - remediation_role = SOLUTION_ID + '-SHARR-Orchestrator-Member' # default + remediation_role = SOLUTION_ID + "-SHARR-Orchestrator-Member" # default if alt_workflow_doc and alt_workflow_role: remediation_role = alt_workflow_role - elif lambda_role_exists(execution_account, automation_doc['RemediationRole']): - remediation_role = automation_doc['RemediationRole'] + elif lambda_role_exists(execution_account, automation_doc["RemediationRole"]): + remediation_role = automation_doc["RemediationRole"] - print(f'Using role {remediation_role} to execute {remote_workflow_doc} in {execution_account} {execution_region}') + print( + f"Using role {remediation_role} to execute {remote_workflow_doc} in {execution_account} {execution_region}" + ) - remediation_role_arn = f'arn:{AWS_PARTITION}:iam::{execution_account}:role/{remediation_role}' - print(f'ARN: {remediation_role_arn}') + remediation_role_arn = ( + f"arn:{AWS_PARTITION}:iam::{execution_account}:role/{remediation_role}" + ) + print(f"ARN: {remediation_role_arn}") ssm = _get_ssm_client(execution_account, remediation_role, execution_region) ssm_parameters = { - "Finding": [ - 
json.dumps(event['Finding']) - ], - "AutomationAssumeRole": [ - remediation_role_arn - ] + "Finding": [json.dumps(event["Finding"])], + "AutomationAssumeRole": [remediation_role_arn], } - if remote_workflow_doc != automation_doc['AutomationDocId']: - ssm_parameters["RemediationDoc"] = [automation_doc['AutomationDocId']] - ssm_parameters["Workflow"] = [json.dumps(event.get('Workflow', {}))] + if remote_workflow_doc != automation_doc["AutomationDocId"]: + ssm_parameters["RemediationDoc"] = [automation_doc["AutomationDocId"]] + ssm_parameters["Workflow"] = [json.dumps(event.get("Workflow", {}))] + + # Check if this a security hub finding, if not then we only send the finding. + workflow_data = event.get("Workflow", {}).get("WorkflowConfig", {}) + + if "security_hub" in workflow_data: + if workflow_data["security_hub"] == "false": + ssm_parameters = { + "Finding": [json.dumps(event["Finding"])], + "AutomationAssumeRole": [remediation_role_arn], + } exec_id = ssm.start_automation_execution( # Launch SSM Doc via Automation DocumentName=remote_workflow_doc, - Parameters=ssm_parameters - )['AutomationExecutionId'] - - answer.update({ - 'status':'QUEUED', - 'message': f'{exec_id}: {automation_doc["ControlId"]} remediation was successfully invoked via AWS Systems Manager in account {automation_doc["AccountId"]} {execution_region}', - 'executionid': exec_id, - 'executionregion': execution_region, - 'executionaccount': execution_account - }) + Parameters=ssm_parameters, + )["AutomationExecutionId"] + + answer.update( + { + "status": "QUEUED", + "message": f'{exec_id}: {automation_doc["ControlId"]} remediation was successfully invoked via AWS Systems Manager in account {automation_doc["AccountId"]} {execution_region}', + "executionid": exec_id, + "executionregion": execution_region, + "executionaccount": execution_account, + } + ) LOGGER.info(answer.message) diff --git a/source/Orchestrator/get_approval_requirement.py b/source/Orchestrator/get_approval_requirement.py index 
a637d344..d30afd67 100644 --- a/source/Orchestrator/get_approval_requirement.py +++ b/source/Orchestrator/get_approval_requirement.py @@ -10,59 +10,64 @@ and stubbed out to support this: _is_remediation_destructive(), etc. """ import json -import boto3 import os import re + +import boto3 from botocore.config import Config from botocore.exceptions import ClientError -from logger import Logger -from awsapi_cached_client import BotoSession -from sechub_findings import Finding -import utils +from layer import utils +from layer.awsapi_cached_client import BotoSession +from layer.logger import Logger +from layer.sechub_findings import Finding # initialise loggers -LOG_LEVEL = os.getenv('log_level', 'info') +LOG_LEVEL = os.getenv("log_level", "info") LOGGER = Logger(loglevel=LOG_LEVEL) # If env WORKFLOW_RUNBOOK is set and not blank then all remediations will be # executed through this runbook, if it is present and enabled in the member # account. -SOLUTION_ID = os.getenv('SOLUTION_ID', 'SO0111') -SOLUTION_ID = re.sub(r'^DEV-', '', SOLUTION_ID) +SOLUTION_ID = os.getenv("SOLUTION_ID", "SO0111") +SOLUTION_ID = re.sub(r"^DEV-", "", SOLUTION_ID) -def _get_ssm_client(account, role, region=''): + +def _get_ssm_client(account, role, region=""): """ Create a client for ssm """ - sess = BotoSession( - account, - f'{role}' - ) + sess = BotoSession(account, f"{role}") kwargs = {} if region: - kwargs['region_name'] = region + kwargs["region_name"] = region + + return sess.client("ssm", **kwargs) - return sess.client('ssm', **kwargs) def _is_remediation_destructive(_, __, ___): return False + def _is_account_sensitive(_): return False + def _is_automatic_trigger(event_type): - if event_type == 'Security Hub Findings - Imported': + if event_type == "Security Hub Findings - Imported": return False else: return True + def _is_custom_action_trigger(event_type): - if event_type == 'Security Hub Findings - Imported': + if event_type == "Security Hub Findings - Imported": return True else: 
return False + def get_running_account(): - return boto3.client('sts').get_caller_identity()['Account'] + return boto3.client("sts").get_caller_identity()["Account"] + def _get_alternate_workflow(accountid): """ @@ -73,98 +78,147 @@ def _get_alternate_workflow(accountid): running_account = get_running_account() # Is an alternate workflow defined? - WORKFLOW_RUNBOOK = os.getenv('WORKFLOW_RUNBOOK', '') - WORKFLOW_RUNBOOK_ACCOUNT = os.getenv('WORKFLOW_RUNBOOK_ACCOUNT', 'member') - WORKFLOW_RUNBOOK_ROLE = os.getenv('WORKFLOW_RUNBOOK_ROLE', '') + WORKFLOW_RUNBOOK = os.getenv("WORKFLOW_RUNBOOK", "") + WORKFLOW_RUNBOOK_ACCOUNT = os.getenv("WORKFLOW_RUNBOOK_ACCOUNT", "member") + WORKFLOW_RUNBOOK_ROLE = os.getenv("WORKFLOW_RUNBOOK_ROLE", "") # Disabled by removing the Lambda environmental var or setting to '' if not WORKFLOW_RUNBOOK: return (None, None, None) - if WORKFLOW_RUNBOOK_ACCOUNT.lower() == 'member': + if WORKFLOW_RUNBOOK_ACCOUNT.lower() == "member": WORKFLOW_RUNBOOK_ACCOUNT = accountid - elif WORKFLOW_RUNBOOK_ACCOUNT.lower() == 'admin': + elif WORKFLOW_RUNBOOK_ACCOUNT.lower() == "admin": WORKFLOW_RUNBOOK_ACCOUNT = running_account else: # log an error - bad config - LOGGER.error(f'WORKFLOW_RUNBOOK_ACCOUNT config error: "{WORKFLOW_RUNBOOK_ACCOUNT}" is not valid. Must be "member" or "admin"') + LOGGER.error( + f'WORKFLOW_RUNBOOK_ACCOUNT config error: "{WORKFLOW_RUNBOOK_ACCOUNT}" is not valid. 
Must be "member" or "admin"' + ) return (None, None, None) # Make sure it exists and is active if _doc_is_active(WORKFLOW_RUNBOOK, WORKFLOW_RUNBOOK_ACCOUNT): - return(WORKFLOW_RUNBOOK, WORKFLOW_RUNBOOK_ACCOUNT, WORKFLOW_RUNBOOK_ROLE) + return (WORKFLOW_RUNBOOK, WORKFLOW_RUNBOOK_ACCOUNT, WORKFLOW_RUNBOOK_ROLE) else: - return(None, None, None) + return (None, None, None) + def _doc_is_active(doc, account): try: - ssm = _get_ssm_client(account, SOLUTION_ID + '-SHARR-Orchestrator-Member') - docinfo = ssm.describe_document( - Name=doc - )['Document'] + ssm = _get_ssm_client(account, SOLUTION_ID + "-SHARR-Orchestrator-Member") + docinfo = ssm.describe_document(Name=doc)["Document"] - doctype = docinfo.get('DocumentType', 'unknown') - docstate = docinfo.get('Status', 'unknown') + doctype = docinfo.get("DocumentType", "unknown") + docstate = docinfo.get("Status", "unknown") - if doctype == "Automation" and \ - docstate == "Active": + if doctype == "Automation" and docstate == "Active": return True else: return False except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] if exception_type in "InvalidDocument": return False else: - LOGGER.error('An unhandled client error occurred: ' + exception_type) + LOGGER.error("An unhandled client error occurred: " + exception_type) return False except Exception as e: - LOGGER.error('An unhandled error occurred: ' + str(e)) + LOGGER.error("An unhandled error occurred: " + str(e)) return False + def lambda_handler(event, _): answer = utils.StepFunctionLambdaAnswer() - answer.update({ - 'workflowdoc': '', - 'workflowaccount': '', - 'workflowrole': '', - 'workflow_data': { - 'impact': 'nondestructive', - 'approvalrequired': 'false' + answer.update( + { + "workflowdoc": "", + "workflowaccount": "", + "workflowrole": "", + "workflow_data": {"impact": "nondestructive", "approvalrequired": "false"}, } - }) + ) LOGGER.info(event) - if "Finding" not in event or \ - "EventType" 
not in event: - answer.update({ - 'status':'ERROR', - 'message':'Missing required data in request' - }) + if "Finding" not in event or "EventType" not in event: + answer.update( + {"status": "ERROR", "message": "Missing required data in request"} + ) LOGGER.error(answer.message) return answer.json() - finding = Finding(event['Finding']) + # + # Check to see if this is a non-sechub finding that we are remediating + # ---------------------------------------------------------------------------------- + product_name = ( + event["Finding"] + .get("ProductFields", {}) + .get("aws/securityhub/ProductName", "Security Hub") + ) - auto_trigger = _is_automatic_trigger(event['EventType']) - is_destructive = _is_remediation_destructive(finding.standard_shortname, finding.standard_version, finding.standard_control) + if product_name != "Security Hub": + non_sec_hub_finding = event["Finding"] + try: + ssm_param = "/Solutions/SO0111/" + if product_name == "Config": + ssm_param += non_sec_hub_finding["Title"] + elif product_name == "Health": + ssm_param += non_sec_hub_finding["GeneratorId"] + elif product_name == "GuardDuty": + ssm_param_type_array = non_sec_hub_finding.get("Types", "") + ssm_param_id = ssm_param_type_array[0].split("-")[1] + ssm_param += ssm_param_id + elif product_name == "Inspector": + ssm_param_id = non_sec_hub_finding.get("ProductFields", {}).get( + "attributes/RULE_TYPE", "" + ) + ssm_param += ssm_param_id + else: + ssm_param += non_sec_hub_finding["Title"] + BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + ssm_client = boto3.client("ssm", config=BOTO_CONFIG) + string_workflow_args = ssm_client.get_parameter(Name=ssm_param) + json_workflow_args = json.loads(string_workflow_args["Parameter"]["Value"]) + answer.update( + { + "workflowdoc": json_workflow_args["RunbookName"], + "workflowrole": json_workflow_args.get("RunbookRole", ""), + "workflow_data": { + "impact": "nondestructive", + "approvalrequired": "false", + "security_hub": 
"false", + }, + } + ) + return answer.json() + except Exception as error: + answer.update({"status": "ERROR", "message": error}) + LOGGER.error(answer.message) + return answer.json() + + finding = Finding(event["Finding"]) + + auto_trigger = _is_automatic_trigger(event["EventType"]) + is_destructive = _is_remediation_destructive( + finding.standard_shortname, finding.standard_version, finding.standard_control + ) is_sensitive = _is_account_sensitive(finding.account_id) - approval_required = 'false' - remediation_impact = 'nondestructive' - use_alt_workflow = 'false' + approval_required = "false" + remediation_impact = "nondestructive" + use_alt_workflow = "false" # # PUT ADDITIONAL CRITERIA HERE. When done, remediation_impact and approval_required # must be set per your needs - #---------------------------------------------------------------------------------- + # ---------------------------------------------------------------------------------- if auto_trigger and is_destructive and is_sensitive: - remediation_impact = 'destructive' - approval_required = 'true' - use_alt_workflow = 'true' + remediation_impact = "destructive" + approval_required = "true" + use_alt_workflow = "true" - #---------------------------------------------------------------------------------- + # ---------------------------------------------------------------------------------- # Is there an alternative workflow configured? (alt_workflow, alt_account, alt_role) = _get_alternate_workflow(finding.account_id) @@ -174,16 +228,18 @@ def lambda_handler(event, _): # workflow_data can be modified to suit your needs. This data is passed to the # alt_workflow. Using the alt_workflow redirects the remediation to your workflow # only! The normal SHARR workflow will not be executed. 
- #---------------------------------------------------------------------------------- + # ---------------------------------------------------------------------------------- if alt_workflow and use_alt_workflow: - answer.update({ - 'workflowdoc': alt_workflow, - 'workflowaccount': alt_account, - 'workflowrole': alt_role, - 'workflow_data': { - 'impact': remediation_impact, - 'approvalrequired': approval_required + answer.update( + { + "workflowdoc": alt_workflow, + "workflowaccount": alt_account, + "workflowrole": alt_role, + "workflow_data": { + "impact": remediation_impact, + "approvalrequired": approval_required, + }, } - }) + ) return answer.json() diff --git a/source/Orchestrator/schedule_remediation.py b/source/Orchestrator/schedule_remediation.py new file mode 100644 index 00000000..8215b4c3 --- /dev/null +++ b/source/Orchestrator/schedule_remediation.py @@ -0,0 +1,167 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import os +from datetime import datetime, timezone + +import boto3 +from botocore.config import Config +from layer.cloudwatch_metrics import CloudWatchMetrics +from layer.logger import Logger + +# initialise loggers +LOG_LEVEL = os.getenv("log_level", "info") +LOGGER = Logger(loglevel=LOG_LEVEL) + +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + + +def connect_to_dynamodb(): + return boto3.client("dynamodb", config=boto_config) + + +def connect_to_sfn(): + return boto3.client("stepfunctions", config=boto_config) + + +def lambda_handler(event, _): + """ + Schedules a remediation for execution. + + `event` should have the following keys and values: + `Records`: Contains the items that are sent from SQS: + `body`: Contains a JSON string that has: + `ResourceRegion`: The region that the remediation will be run in. + `AccountId`: The account that the remediation will be run on. 
+ `TaskToken`: The task token sent by the Orchestrator to determine whether this execution has completed successfully. + `RemediationDetails`: Details for the remediation that are needed for the next step. + + `context` is ignored + """ + try: + record = event["Records"][0] + body = json.loads(record["body"]) + region = body["ResourceRegion"] + account_id = body["AccountId"] + task_token = body["TaskToken"] + remediation_details = body["RemediationDetails"] + table_key = f"{account_id}-{region}" + table_name = get_table_name() + wait_threshold = get_wait_threshold() + current_timestamp = int(datetime.now(timezone.utc).timestamp()) + + dynamodb_client = connect_to_dynamodb() + sfn_client = connect_to_sfn() + + result = dynamodb_client.get_item( + TableName=table_name, Key={"AccountID-Region": {"S": table_key}} + ) + + if "Item" in result and "LastExecutedTimestamp" in result["Item"]: + found_timestamp_string = result["Item"]["LastExecutedTimestamp"]["S"] + found_timestamp = int(found_timestamp_string) + + new_timestamp = ( + found_timestamp + wait_threshold + if found_time_is_within_wait_threshold(found_timestamp) + else current_timestamp + ) + new_timestamp_ttl = new_timestamp + wait_threshold + + dynamodb_client.put_item( + TableName=table_name, + Item={ + "AccountID-Region": {"S": table_key}, + "LastExecutedTimestamp": {"S": str(new_timestamp)}, + "TTL": {"N": str(new_timestamp_ttl)}, + }, + ConditionExpression="LastExecutedTimestamp = :timestamp", + ExpressionAttributeValues={":timestamp": {"S": found_timestamp_string}}, + ) + create_and_send_metric(new_timestamp, current_timestamp) + return send_success_to_step_function( + sfn_client, task_token, new_timestamp, remediation_details + ) + else: + put_initial_in_dynamodb(table_name, table_key, current_timestamp) + create_and_send_metric(current_timestamp, current_timestamp) + return send_success_to_step_function( + sfn_client, + task_token, + current_timestamp, + remediation_details, + ) + except Exception as e: + 
sfn_client = connect_to_sfn() + sfn_client.send_task_failure( + taskToken=task_token, + error=e.__class__.__name__, + cause=str(e), + ) + + +def get_wait_threshold() -> int: + wait_threshold_string = os.environ.get("RemediationWaitTime") + if wait_threshold_string is None: + raise ValueError("Cannot proceed without wait threshold set") + return int(wait_threshold_string) + + +def get_table_name() -> str: + table_name = os.environ.get("SchedulingTableName") + if table_name is None: + raise ValueError("Cannot proceed without table name set") + return table_name + + +def found_time_is_within_wait_threshold(found_time: int) -> bool: + return ( + int(datetime.now(timezone.utc).timestamp()) - found_time <= get_wait_threshold() + ) + + +def put_initial_in_dynamodb( + table_name: str, table_key: str, current_timestamp: int +) -> None: + dynamodb_client = connect_to_dynamodb() + wait_threshold = get_wait_threshold() + current_timestamp_ttl = current_timestamp + wait_threshold + + dynamodb_client.put_item( + TableName=table_name, + Item={ + "AccountID-Region": {"S": table_key}, + "LastExecutedTimestamp": {"S": str(current_timestamp)}, + "TTL": {"N": str(current_timestamp_ttl)}, + }, + ) + + +def send_success_to_step_function( + sfn_client, task_token, new_timestamp, remediation_details +): + # Formatting for expected State Machine time + planned_timestamp = datetime.fromtimestamp(new_timestamp, timezone.utc).strftime( + "%Y-%m-%dT%H:%M:%SZ" + ) + output_dict = {"PlannedTimestamp": planned_timestamp} + output_dict.update(remediation_details) + + sfn_client.send_task_success( + taskToken=task_token, + output=json.dumps(output_dict), + ) + return f"Remediation scheduled to execute at {planned_timestamp}" + + +def create_and_send_metric(new_timestamp, current_timestamp): + try: + cloudwatch_metrics = CloudWatchMetrics() + cloudwatch_metric = { + "MetricName": "RemediationSchedulingDelay", + "Unit": "Seconds", + "Value": new_timestamp - current_timestamp, + } + 
cloudwatch_metrics.send_metric(cloudwatch_metric) + except Exception: + LOGGER.debug("Did not send Cloudwatch metric") diff --git a/source/Orchestrator/send_notifications.py b/source/Orchestrator/send_notifications.py index 456db202..2b644a0e 100644 --- a/source/Orchestrator/send_notifications.py +++ b/source/Orchestrator/send_notifications.py @@ -1,22 +1,27 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 import json -from json.decoder import JSONDecodeError -import boto3 import os -import sechub_findings -from logger import Logger -from metrics import Metrics +from json.decoder import JSONDecodeError +from typing import Any, Union + +from layer import sechub_findings +from layer.cloudwatch_metrics import CloudWatchMetrics +from layer.logger import Logger +from layer.metrics import Metrics # Get AWS region from Lambda environment. If not present then we're not # running under lambda, so defaulting to us-east-1 -AWS_REGION = os.getenv('AWS_DEFAULT_REGION', 'us-east-1') # MUST BE SET in global variables -AWS_PARTITION = os.getenv('AWS_PARTITION', 'aws') # MUST BE SET in global variables +AWS_REGION = os.getenv( + "AWS_DEFAULT_REGION", "us-east-1" +) # MUST BE SET in global variables +AWS_PARTITION = os.getenv("AWS_PARTITION", "aws") # MUST BE SET in global variables # initialise loggers -LOG_LEVEL = os.getenv('log_level', 'info') +LOG_LEVEL = os.getenv("log_level", "info") LOGGER = Logger(loglevel=LOG_LEVEL) + def format_details_for_output(details): """Handle various possible formats in the details""" details_formatted = [] @@ -24,7 +29,9 @@ def format_details_for_output(details): details_formatted = details elif isinstance(details, str) and details[0:6] == "Cause:": try: - details_formatted = json.dumps(json.loads(details[7:]), indent=2).split('\n') + details_formatted = json.dumps(json.loads(details[7:]), indent=2).split( + "\n" + ) except JSONDecodeError: details_formatted.append(details[7:]) elif 
isinstance(details, str): @@ -37,97 +44,128 @@ def lambda_handler(event, _): return details_formatted + def set_message_prefix_and_suffix(event): - message_prefix = event['Notification'].get('ExecId','') - message_suffix = event['Notification'].get('AffectedObject', '') + message_prefix = event["Notification"].get("ExecId", "") + message_suffix = event["Notification"].get("AffectedObject", "") if message_prefix: - message_prefix += ': ' + message_prefix += ": " if message_suffix: - message_suffix = f' ({message_suffix})' + message_suffix = f" ({message_suffix})" return message_prefix, message_suffix + def lambda_handler(event, _): # Expected input: # Notification: # Message: string # State: string # Details?: string - # updateSecHub: yes|NO - # Payload: contains the Step Function Input to the previous state and consists of: - # Finding?: json + # Finding?: json + # EventType?: string + # AutomationDocument: # ControlId?: string # SecurityStandard?: string - # EventType?: string message_prefix, message_suffix = set_message_prefix_and_suffix(event) # Get finding status - finding_status = 'FAILED' # default state - if event['Notification']['State'].upper == 'SUCCESS': - finding_status = 'RESOLVED' - elif event['Notification']['State'].upper == 'QUEUED': - finding_status = 'PENDING' + finding_status = "FAILED" # default state + if event["Notification"]["State"].upper() == "SUCCESS": + finding_status = "RESOLVED" + elif event["Notification"]["State"].upper() == "QUEUED": + finding_status = "PENDING" # elif event['Notification']['State'].upper == 'FAILED': # finding_status = 'FAILED' finding = None - finding_info = '' - if 'Finding' in event: - finding = sechub_findings.Finding(event['Finding']) + finding_info: Union[str, dict[str, Any]] = "" + if "Finding" in event: + finding = sechub_findings.Finding(event["Finding"]) finding_info = { - 'finding_id': finding.uuid, - 'finding_description': finding.description, - 'standard_name': finding.standard_name, - 
'standard_version': finding.standard_version, - 'standard_control': finding.standard_control, - 'title': finding.title, - 'region': finding.region, - 'account': finding.account_id, - 'finding_arn': finding.arn + "finding_id": finding.uuid, + "finding_description": finding.description, + "standard_name": finding.standard_name, + "standard_version": finding.standard_version, + "standard_control": finding.standard_control, + "title": finding.title, + "region": finding.region, + "account": finding.account_id, + "finding_arn": finding.arn, } + event_state = event["Notification"]["State"].upper() + # Send anonymous metrics - if 'EventType' in event and 'Finding' in event: - metrics = Metrics(event['EventType']) - metrics_data = metrics.get_metrics_from_finding(event['Finding']) - metrics_data['status'] = finding_status + if "EventType" in event and "Finding" in event: + metrics = Metrics(event["EventType"]) + metrics_data = metrics.get_metrics_from_finding(event["Finding"]) + metrics_data["status"] = finding_status metrics.send_metrics(metrics_data) - if event['Notification']['State'].upper() in ('SUCCESS', 'QUEUED'): + create_and_send_cloudwatch_metric(event_state) + + if event_state in ("SUCCESS", "QUEUED"): notification = sechub_findings.SHARRNotification( - event.get('SecurityStandard', 'SHARR'), + event.get("SecurityStandard", "SHARR"), AWS_REGION, - event.get('ControlId', None) + event.get("ControlId", None), ) - notification.severity = 'INFO' + notification.severity = "INFO" notification.send_to_sns = True - elif event['Notification']['State'].upper() == 'FAILED': + elif event_state == "FAILED": notification = sechub_findings.SHARRNotification( - event.get('SecurityStandard', 'SHARR'), + event.get("SecurityStandard", "SHARR"), AWS_REGION, - event.get('ControlId', None) + event.get("ControlId", None), ) - notification.severity = 'ERROR' + notification.severity = "ERROR" notification.send_to_sns = True - elif event['Notification']['State'].upper() in 
{'WRONGSTANDARD', 'LAMBDAERROR'}: - notification = sechub_findings.SHARRNotification('SHARR',AWS_REGION, None) - notification.severity = 'ERROR' + elif event_state in {"WRONGSTANDARD", "LAMBDAERROR"}: + notification = sechub_findings.SHARRNotification("SHARR", AWS_REGION, None) + notification.severity = "ERROR" else: notification = sechub_findings.SHARRNotification( - event.get('SecurityStandard', 'SHARR'), + event.get("SecurityStandard", "SHARR"), AWS_REGION, - event.get('ControlId', None) + event.get("ControlId", None), ) - notification.severity = 'ERROR' + notification.severity = "ERROR" if finding: - finding.flag(event['Notification']['Message']) - - notification.message = message_prefix + event['Notification']['Message'] + message_suffix - if 'Details' in event['Notification'] and event['Notification']['Details'] != 'MISSING': - notification.logdata = format_details_for_output(event['Notification']['Details']) + finding.flag(event["Notification"]["Message"]) + + notification.message = ( + message_prefix + event["Notification"]["Message"] + message_suffix + ) + if ( + "Details" in event["Notification"] + and event["Notification"]["Details"] != "MISSING" + ): + notification.logdata = format_details_for_output( + event["Notification"]["Details"] + ) notification.finding_info = finding_info notification.notify() + + +def create_and_send_cloudwatch_metric(event_state): + try: + cloudwatch_metrics = CloudWatchMetrics() + cloudwatch_metric = { + "MetricName": "RemediationOutcome", + "Dimensions": [ + { + "Name": "Outcome", + "Value": event_state, + }, + ], + "Unit": "Count", + "Value": 1, + } + cloudwatch_metrics.send_metric(cloudwatch_metric) + except Exception: + LOGGER.debug("Did not send Cloudwatch metric") diff --git a/source/Orchestrator/test/__init__.py b/source/Orchestrator/test/__init__.py index 7ae68937..6c64ece4 100644 --- a/source/Orchestrator/test/__init__.py +++ b/source/Orchestrator/test/__init__.py @@ -1,5 +1,6 @@ # Copyright Amazon.com, Inc. 
or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import sys import os -sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'LambdaLayers')) +import sys + +sys.path.append(os.path.join(os.path.dirname(__file__), "..", "..", "layer")) diff --git a/source/Orchestrator/test/conftest.py b/source/Orchestrator/test/conftest.py index 140e5897..3886f3f0 100644 --- a/source/Orchestrator/test/conftest.py +++ b/source/Orchestrator/test/conftest.py @@ -1,9 +1,11 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 import os -import pytest from unittest.mock import patch -from awsapi_cached_client import AWSCachedClient + +import pytest +from layer.awsapi_cached_client import AWSCachedClient + @pytest.fixture(scope="module", autouse=True) def aws_credentials(): @@ -15,9 +17,12 @@ def aws_credentials(): os.environ["SOLUTION_ID"] = "SOTestID" os.environ["AWS_ACCOUNT"] = "123456789012" + @pytest.fixture(scope="module", autouse=True) def mock_get_local_account_id(): - mock = patch.object(AWSCachedClient, '_get_local_account_id', return_value="111111111111") + mock = patch.object( + AWSCachedClient, "_get_local_account_id", return_value="111111111111" + ) mock.start() yield mock.stop() diff --git a/source/Orchestrator/test/test_check_ssm_doc_state.py b/source/Orchestrator/test/test_check_ssm_doc_state.py index a8c343b8..b15999b2 100644 --- a/source/Orchestrator/test/test_check_ssm_doc_state.py +++ b/source/Orchestrator/test/test_check_ssm_doc_state.py @@ -5,25 +5,20 @@ Run from /deployment/build/Orchestrator after running build-s3-dist.sh """ import os -import pytest -import boto3 + import botocore.session from botocore.config import Config -from botocore.stub import Stubber, ANY -from pytest_mock import mocker +from botocore.stub import Stubber from check_ssm_doc_state import lambda_handler -from awsapi_cached_client import AWSCachedClient -import sechub_findings +from 
layer.awsapi_cached_client import AWSCachedClient + def get_region(): - return os.getenv('AWS_DEFAULT_REGION') + return os.getenv("AWS_DEFAULT_REGION") + + +BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=get_region()) -BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=get_region() -) def workflow_doc(): return { @@ -35,31 +30,27 @@ def workflow_doc(): { "Type": "String", "Name": "AutomationAssumeRole", - "Description": "The ARN of the role that allows Automation to perform the actions on your behalf." + "Description": "The ARN of the role that allows Automation to perform the actions on your behalf.", }, { "Type": "StringMap", "Name": "Finding", - "Description": "The Finding data from the Orchestrator Step Function" + "Description": "The Finding data from the Orchestrator Step Function", }, { "Type": "StringMap", "Name": "SSMExec", - "Description": "Data for decision support in this runbook" + "Description": "Data for decision support in this runbook", }, { "Type": "String", "Name": "RemediationDoc", - "Description": "the SHARR Remediation (ingestion) runbook to execute" - } + "Description": "the SHARR Remediation (ingestion) runbook to execute", + }, ], "Tags": [], "DocumentType": "Automation", - "PlatformTypes": [ - "Windows", - "Linux", - "MacOS" - ], + "PlatformTypes": ["Windows", "Linux", "MacOS"], "DocumentVersion": "1", "HashType": "Sha256", "CreatedDate": 1633985125.065, @@ -68,10 +59,11 @@ def workflow_doc(): "DefaultVersion": "1", "DocumentFormat": "YAML", "LatestVersion": "1", - "Description": "### Document Name - SHARR-Run_Remediation\n\n## What does this document do?\nThis document is executed by the AWS Security Hub Automated Response and Remediation Orchestrator Step Function. 
It implements controls such as manual approvals based on criteria passed by the Orchestrator.\n\n## Input Parameters\n* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf.\n* Finding: (Required) json-formatted finding data\n* RemediationDoc: (Required) remediation runbook to execute after approval\n* SSMExec: (Required) json-formatted data for decision support in determining approval requirement\n" + "Description": "### Document Name - SHARR-Run_Remediation\n\n## What does this document do?\nThis document is executed by the AWS Security Hub Automated Response and Remediation Orchestrator Step Function. It implements controls such as manual approvals based on criteria passed by the Orchestrator.\n\n## Input Parameters\n* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf.\n* Finding: (Required) json-formatted finding data\n* RemediationDoc: (Required) remediation runbook to execute after approval\n* SSMExec: (Required) json-formatted data for decision support in determining approval requirement\n", } } + def test_sunny_day(mocker): test_input = { "EventType": "Security Hub Findings - Custom Action", @@ -85,44 +77,43 @@ def test_sunny_day(mocker): "StandardsSubscriptionArn": "arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0", "ControlId": "AutoScaling.1", "StandardsControlArn": "arn:aws:securityhub:us-east-1:111111111111:control/aws-foundational-security-best-practices/v/1.0.0/AutoScaling.1", + "aws/securityhub/ProductName": "Security Hub", }, "Resources": [ { "Type": "AwsAccount", "Id": "arn:aws:autoscaling:us-east-1:111111111111:autoScalingGroup:785df3481e1-cd66-435d-96de-d6ed5416defd:autoScalingGroupName/sharr-test-autoscaling-1", "Partition": "aws", - "Region": "us-east-1" + "Region": "us-east-1", } ], "WorkflowState": "NEW", - "Workflow": { - "Status": "NEW" - }, - "RecordState": 
"ACTIVE" - } + "Workflow": {"Status": "NEW"}, + "RecordState": "ACTIVE", + }, } expected_good_response = { - 'accountid': '111111111111', - 'automationdocid': 'ASR-AFSBP_1.0.0_AutoScaling.1', - 'controlid': 'AutoScaling.1', - 'logdata': [], - 'message': '', - 'remediation_status': '', - 'remediationrole': 'SO0111-Remediate-AFSBP-1.0.0-AutoScaling.1', - 'resourceregion': 'us-east-1', - 'securitystandard': 'AFSBP', - 'securitystandardversion': '1.0.0', - 'standardsupported': 'True', - 'status': 'ACTIVE' + "accountid": "111111111111", + "automationdocid": "ASR-AFSBP_1.0.0_AutoScaling.1", + "controlid": "AutoScaling.1", + "logdata": [], + "message": "", + "remediation_status": "", + "remediationrole": "SO0111-Remediate-AFSBP-1.0.0-AutoScaling.1", + "resourceregion": "us-east-1", + "securitystandard": "AFSBP", + "securitystandardversion": "1.0.0", + "standardsupported": "True", + "status": "ACTIVE", } # use AWSCachedClient as it will us the same stub for any calls AWS = AWSCachedClient(get_region()) - ssm_c = AWS.get_connection('ssm') + ssm_c = AWS.get_connection("ssm") ssmc_stub = Stubber(ssm_c) ssmc_stub.add_response( - 'get_parameter', + "get_parameter", { "Parameter": { "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname", @@ -131,18 +122,16 @@ def test_sunny_day(mocker): "Version": 1, "LastModifiedDate": "2021-05-11T08:21:43.794000-04:00", "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname", - "DataType": "text" + "DataType": "text", } - },{ + }, + { "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname" - } - ) - ssmc_stub.add_client_error( - 'get_parameter', - 'ParameterNotFound' + }, ) + ssmc_stub.add_client_error("get_parameter", "ParameterNotFound") ssmc_stub.add_response( - 'get_parameter', + "get_parameter", { "Parameter": { "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0", @@ -151,12 +140,12 @@ 
def test_sunny_day(mocker): "Version": 1, "LastModifiedDate": "2021-05-11T08:21:44.632000-04:00", "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0", - "DataType": "text" + "DataType": "text", } - } + }, ) ssmc_stub.add_response( - 'describe_document', + "describe_document", { "Document": { "Hash": "be480c5a8771035918c439a0c76e1471306a699b7f275fe7e0bea70903dc569a", @@ -172,55 +161,51 @@ def test_sunny_day(mocker): "Name": "AutomationAssumeRole", "Type": "String", "Description": "(Optional) The ARN of the role that allows Automation to perform the actions on your behalf.", - "DefaultValue": "" + "DefaultValue": "", }, { "Name": "SolutionId", "Type": "String", "Description": "AWS Solutions Solution Id", - "DefaultValue": "SO0111" + "DefaultValue": "SO0111", }, { "Name": "Finding", "Type": "StringMap", - "Description": "The input from Step function for ASG1 finding" + "Description": "The input from Step function for ASG1 finding", }, { "Name": "HealthCheckGracePeriod", "Type": "Integer", "Description": "ELB Health Check Grace Period", - "DefaultValue": "30" + "DefaultValue": "30", }, { "Name": "SolutionVersion", "Type": "String", "Description": "AWS Solutions Solution Version", - "DefaultValue": "unknown" - } - ], - "PlatformTypes": [ - "Windows", - "Linux", - "MacOS" + "DefaultValue": "unknown", + }, ], + "PlatformTypes": ["Windows", "Linux", "MacOS"], "DocumentType": "Automation", "SchemaVersion": "0.3", "LatestVersion": "1", "DefaultVersion": "1", "DocumentFormat": "JSON", - "Tags": [] + "Tags": [], } - },{ - "Name": "ASR-AFSBP_1.0.0_AutoScaling.1" - } + }, + {"Name": "ASR-AFSBP_1.0.0_AutoScaling.1"}, ) ssmc_stub.activate() - mocker.patch('check_ssm_doc_state._get_ssm_client', return_value=ssm_c) + mocker.patch("check_ssm_doc_state._get_ssm_client", return_value=ssm_c) assert lambda_handler(test_input, {}) == expected_good_response ssmc_stub.deactivate() + def test_doc_not_active(mocker): test_input 
= { "EventType": "Security Hub Findings - Custom Action", @@ -234,53 +219,52 @@ def test_doc_not_active(mocker): "StandardsSubscriptionArn": "arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0", "ControlId": "AutoScaling.1", "StandardsControlArn": "arn:aws:securityhub:us-east-1:111111111111:control/aws-foundational-security-best-practices/v/1.0.0/AutoScaling.17", + "aws/securityhub/ProductName": "Security Hub", }, "Resources": [ { "Type": "AwsAccount", "Id": "arn:aws:autoscaling:us-east-1:111111111111:autoScalingGroup:785df3481e1-cd66-435d-96de-d6ed5416defd:autoScalingGroupName/sharr-test-autoscaling-1", "Partition": "aws", - "Region": "us-east-1" + "Region": "us-east-1", } ], "Compliance": { "Status": "FAILED", "StatusReasons": [ { - "ReasonCode": "CONFIG_EVALUATIONS_EMPTY", - "Description": "AWS Config evaluated your resources against the rule. The rule did not apply to the AWS resources in its scope, the specified resources were deleted, or the evaluation results were deleted." + "ReasonCode": "CONFIG_EVALUATIONS_EMPTY", + "Description": "AWS Config evaluated your resources against the rule. 
The rule did not apply to the AWS resources in its scope, the specified resources were deleted, or the evaluation results were deleted.", } - ] + ], }, "WorkflowState": "NEW", - "Workflow": { - "Status": "NEW" - }, - "RecordState": "ACTIVE" - } + "Workflow": {"Status": "NEW"}, + "RecordState": "ACTIVE", + }, } expected_good_response = { - 'accountid': '111111111111', - 'automationdocid': 'ASR-AFSBP_1.0.0_AutoScaling.17', - 'controlid': 'AutoScaling.17', - 'logdata': [], - 'message': 'Document ASR-AFSBP_1.0.0_AutoScaling.17 does not exist.', - 'remediation_status': '', - 'resourceregion': 'us-east-1', - 'remediationrole': 'SO0111-Remediate-AFSBP-1.0.0-AutoScaling.17', - 'securitystandard': 'AFSBP', - 'securitystandardversion': '1.0.0', - 'standardsupported': 'True', - 'status': 'NOTFOUND' + "accountid": "111111111111", + "automationdocid": "ASR-AFSBP_1.0.0_AutoScaling.17", + "controlid": "AutoScaling.17", + "logdata": [], + "message": "Document ASR-AFSBP_1.0.0_AutoScaling.17 does not exist.", + "remediation_status": "", + "resourceregion": "us-east-1", + "remediationrole": "SO0111-Remediate-AFSBP-1.0.0-AutoScaling.17", + "securitystandard": "AFSBP", + "securitystandardversion": "1.0.0", + "standardsupported": "True", + "status": "NOTFOUND", } # use AWSCachedClient as it will us the same stub for any calls AWS = AWSCachedClient(get_region()) - ssm_c = AWS.get_connection('ssm') + ssm_c = AWS.get_connection("ssm") ssmc_stub = Stubber(ssm_c) ssmc_stub.add_response( - 'get_parameter', + "get_parameter", { "Parameter": { "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname", @@ -289,18 +273,16 @@ def test_doc_not_active(mocker): "Version": 1, "LastModifiedDate": "2021-05-11T08:21:43.794000-04:00", "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname", - "DataType": "text" + "DataType": "text", } - },{ + }, + { "Name": 
"/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname" - } - ) - ssmc_stub.add_client_error( - 'get_parameter', - 'ParameterNotFound' + }, ) + ssmc_stub.add_client_error("get_parameter", "ParameterNotFound") ssmc_stub.add_response( - 'get_parameter', + "get_parameter", { "Parameter": { "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0", @@ -309,21 +291,20 @@ def test_doc_not_active(mocker): "Version": 1, "LastModifiedDate": "2021-05-11T08:21:44.632000-04:00", "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0", - "DataType": "text" + "DataType": "text", } - } - ) - ssmc_stub.add_client_error( - 'describe_document', - 'InvalidDocument' + }, ) + ssmc_stub.add_client_error("describe_document", "InvalidDocument") ssmc_stub.activate() - mocker.patch('check_ssm_doc_state._get_ssm_client', return_value=ssm_c) + mocker.patch("check_ssm_doc_state._get_ssm_client", return_value=ssm_c) + mocker.patch("check_ssm_doc_state.CloudWatchMetrics.send_metric", return_value=None) assert lambda_handler(test_input, {}) == expected_good_response ssmc_stub.deactivate() + def test_client_error(mocker): test_input = { "EventType": "Security Hub Findings - Custom Action", @@ -337,44 +318,43 @@ def test_client_error(mocker): "StandardsSubscriptionArn": "arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0", "ControlId": "AutoScaling.1", "StandardsControlArn": "arn:aws:securityhub:us-east-1:111111111111:control/aws-foundational-security-best-practices/v/1.0.0/AutoScaling.1", + "aws/securityhub/ProductName": "Security Hub", }, "Resources": [ { "Type": "AwsAccount", "Id": "arn:aws:autoscaling:us-east-1:111111111111:autoScalingGroup:785df3481e1-cd66-435d-96de-d6ed5416defd:autoScalingGroupName/sharr-test-autoscaling-1", "Partition": "aws", - "Region": "us-east-1" + "Region": "us-east-1", } ], "WorkflowState": "NEW", - "Workflow": { - 
"Status": "NEW" - }, - "RecordState": "ACTIVE" - } + "Workflow": {"Status": "NEW"}, + "RecordState": "ACTIVE", + }, } expected_good_response = { - 'accountid': '111111111111', - 'automationdocid': 'ASR-AFSBP_1.0.0_AutoScaling.1', - 'controlid': 'AutoScaling.1', - 'logdata': [], - 'message': 'An unhandled client error occurred: ADoorIsAjar', - 'remediation_status': '', - 'remediationrole': 'SO0111-Remediate-AFSBP-1.0.0-AutoScaling.1', - 'resourceregion': 'us-east-1', - 'securitystandard': 'AFSBP', - 'securitystandardversion': '1.0.0', - 'standardsupported': 'True', - 'status': 'CLIENTERROR' + "accountid": "111111111111", + "automationdocid": "ASR-AFSBP_1.0.0_AutoScaling.1", + "controlid": "AutoScaling.1", + "logdata": [], + "message": "An unhandled client error occurred: ADoorIsAjar", + "remediation_status": "", + "remediationrole": "SO0111-Remediate-AFSBP-1.0.0-AutoScaling.1", + "resourceregion": "us-east-1", + "securitystandard": "AFSBP", + "securitystandardversion": "1.0.0", + "standardsupported": "True", + "status": "CLIENTERROR", } # use AWSCachedClient as it will us the same stub for any calls AWS = AWSCachedClient(get_region()) - ssm_c = AWS.get_connection('ssm') + ssm_c = AWS.get_connection("ssm") ssmc_stub = Stubber(ssm_c) ssmc_stub.add_response( - 'get_parameter', + "get_parameter", { "Parameter": { "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname", @@ -383,18 +363,16 @@ def test_client_error(mocker): "Version": 1, "LastModifiedDate": "2021-05-11T08:21:43.794000-04:00", "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname", - "DataType": "text" + "DataType": "text", } - },{ + }, + { "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname" - } - ) - ssmc_stub.add_client_error( - 'get_parameter', - 'ParameterNotFound' + }, ) + ssmc_stub.add_client_error("get_parameter", "ParameterNotFound") ssmc_stub.add_response( - 
'get_parameter', + "get_parameter", { "Parameter": { "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0", @@ -403,22 +381,20 @@ def test_client_error(mocker): "Version": 1, "LastModifiedDate": "2021-05-11T08:21:44.632000-04:00", "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0", - "DataType": "text" + "DataType": "text", } - } - ) - ssmc_stub.add_client_error( - 'describe_document', - 'ADoorIsAjar' + }, ) + ssmc_stub.add_client_error("describe_document", "ADoorIsAjar") ssmc_stub.activate() - mocker.patch('check_ssm_doc_state._get_ssm_client', return_value=ssm_c) + mocker.patch("check_ssm_doc_state._get_ssm_client", return_value=ssm_c) assert lambda_handler(test_input, {}) == expected_good_response ssmc_stub.deactivate() + def test_control_remap(mocker): test_input = { "EventType": "Security Hub Findings - Custom Action", @@ -426,13 +402,12 @@ def test_control_remap(mocker): "ProductArn": "arn:aws:securityhub:us-east-1::product/aws/securityhub", "GeneratorId": "arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/1.6", "RecordState": "ACTIVE", - "Workflow": { - "Status": "NEW" - }, + "Workflow": {"Status": "NEW"}, "WorkflowState": "NEW", "ProductFields": { "RuleId": "1.6", "StandardsControlArn": "arn:aws:securityhub:us-east-1:111111111111:control/cis-aws-foundations-benchmark/v/1.2.0/1.6", + "aws/securityhub/ProductName": "Security Hub", }, "AwsAccountId": "111111111111", "Id": "arn:aws:securityhub:us-east-1:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/1.6/finding/3fe13eb6-b093-48b2-ba3b-b975347c3183", @@ -441,32 +416,32 @@ def test_control_remap(mocker): "Partition": "aws", "Type": "AwsAccount", "Region": "us-east-1", - "Id": "AWS::::Account:111111111111" + "Id": "AWS::::Account:111111111111", } - ] - } + ], + }, } expected_good_response = { - 'accountid': '111111111111', - 'automationdocid': 'ASR-CIS_1.2.0_1.5', - 'controlid': '1.6', - 
'logdata': [], - 'message': '', - 'remediation_status': '', - 'resourceregion': 'us-east-1', - 'remediationrole': 'SO0111-Remediate-CIS-1.2.0-1.5', - 'securitystandard': 'CIS', - 'securitystandardversion': '1.2.0', - 'standardsupported': 'True', - 'status': 'ACTIVE' + "accountid": "111111111111", + "automationdocid": "ASR-CIS_1.2.0_1.5", + "controlid": "1.6", + "logdata": [], + "message": "", + "remediation_status": "", + "resourceregion": "us-east-1", + "remediationrole": "SO0111-Remediate-CIS-1.2.0-1.5", + "securitystandard": "CIS", + "securitystandardversion": "1.2.0", + "standardsupported": "True", + "status": "ACTIVE", } AWS = AWSCachedClient(get_region()) - ssm_c = AWS.get_connection('ssm') + ssm_c = AWS.get_connection("ssm") ssmc_stub = Stubber(ssm_c) ssmc_stub.add_response( - 'get_parameter', + "get_parameter", { "Parameter": { "Name": "/Solutions/SO0111/cis-aws-foundations-benchmark/1.2.0/shortname", @@ -475,14 +450,13 @@ def test_control_remap(mocker): "Version": 1, "LastModifiedDate": "2021-05-11T08:21:43.794000-04:00", "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/cis-aws-foundations-benchmark/1.2.0/shortname", - "DataType": "text" + "DataType": "text", } - },{ - "Name": "/Solutions/SO0111/cis-aws-foundations-benchmark/1.2.0/shortname" - } + }, + {"Name": "/Solutions/SO0111/cis-aws-foundations-benchmark/1.2.0/shortname"}, ) ssmc_stub.add_response( - 'get_parameter', + "get_parameter", { "Parameter": { "Name": "/Solutions/SO0111/CIS/1.2.0/1.6/remap", @@ -491,14 +465,13 @@ def test_control_remap(mocker): "Version": 1, "LastModifiedDate": "2021-05-11T08:21:43.794000-04:00", "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/CIS/1.2.0/1.6/remap", - "DataType": "text" + "DataType": "text", } - },{ - "Name": "/Solutions/SO0111/CIS/1.2.0/1.6/remap" - } + }, + {"Name": "/Solutions/SO0111/CIS/1.2.0/1.6/remap"}, ) ssmc_stub.add_response( - 'get_parameter', + "get_parameter", { "Parameter": { "Name": 
"/Solutions/SO0111/cis-aws-foundations-benchmark/1.2.0/status", @@ -507,15 +480,14 @@ def test_control_remap(mocker): "Version": 1, "LastModifiedDate": "2021-05-11T08:21:44.632000-04:00", "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/cis-aws-foundations-benchmark/1.2.0/status", - "DataType": "text" + "DataType": "text", } - },{ - "Name": "/Solutions/SO0111/cis-aws-foundations-benchmark/1.2.0/status" - } + }, + {"Name": "/Solutions/SO0111/cis-aws-foundations-benchmark/1.2.0/status"}, ) ssmc_stub.add_response( - 'describe_document', + "describe_document", { "Document": { "Hash": "9ca1ee49ff33196adad3fa19624d18943c018b78721999e256ecd4d2246cf1e5", @@ -531,54 +503,51 @@ def test_control_remap(mocker): "Name": "AutomationAssumeRole", "Type": "String", "Description": "(Optional) The ARN of the role that allows Automation to perform the actions on your behalf.", - "DefaultValue": "" + "DefaultValue": "", }, { "Name": "SolutionId", "Type": "String", "Description": "AWS Solutions Solution Id", - "DefaultValue": "SO0111" + "DefaultValue": "SO0111", }, { "Name": "Finding", "Type": "StringMap", - "Description": "The input from Step function for ASG1 finding" + "Description": "The input from Step function for ASG1 finding", }, { "Name": "HealthCheckGracePeriod", "Type": "Integer", "Description": "ELB Health Check Grace Period", - "DefaultValue": "30" + "DefaultValue": "30", }, { "Name": "SolutionVersion", "Type": "String", "Description": "AWS Solutions Solution Version", - "DefaultValue": "unknown" - } - ], - "PlatformTypes": [ - "Windows", - "Linux", - "MacOS" + "DefaultValue": "unknown", + }, ], + "PlatformTypes": ["Windows", "Linux", "MacOS"], "DocumentType": "Automation", "SchemaVersion": "0.3", "LatestVersion": "1", "DefaultVersion": "1", "DocumentFormat": "JSON", - "Tags": [] + "Tags": [], } - } + }, ) ssmc_stub.activate() - mocker.patch('check_ssm_doc_state._get_ssm_client', return_value=ssm_c) + mocker.patch("check_ssm_doc_state._get_ssm_client", 
return_value=ssm_c) assert lambda_handler(test_input, {}) == expected_good_response ssmc_stub.deactivate() -#=============================================================================== + +# =============================================================================== def test_alt_workflow_with_role(mocker): test_input = { "EventType": "Security Hub Findings - Custom Action", @@ -586,13 +555,12 @@ def test_alt_workflow_with_role(mocker): "ProductArn": "arn:aws:securityhub:us-east-1::product/aws/securityhub", "GeneratorId": "arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/1.6", "RecordState": "ACTIVE", - "Workflow": { - "Status": "NEW" - }, + "Workflow": {"Status": "NEW"}, "WorkflowState": "NEW", "ProductFields": { "RuleId": "1.6", "StandardsControlArn": "arn:aws:securityhub:us-east-1:111111111111:control/cis-aws-foundations-benchmark/v/1.2.0/1.6", + "aws/securityhub/ProductName": "Security Hub", }, "AwsAccountId": "111111111111", "Id": "arn:aws:securityhub:us-east-1:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/1.6/finding/3fe13eb6-b093-48b2-ba3b-b975347c3183", @@ -601,34 +569,32 @@ def test_alt_workflow_with_role(mocker): "Partition": "aws", "Type": "AwsAccount", "Region": "us-east-1", - "Id": "AWS::::Account:111111111111" + "Id": "AWS::::Account:111111111111", } - ] + ], }, - "Workflow": { - "WorkflowDocument": "AlternateDoc" - } + "Workflow": {"WorkflowDocument": "AlternateDoc"}, } expected_good_response = { - 'accountid': '111111111111', - 'automationdocid': 'ASR-CIS_1.2.0_1.6', - 'controlid': '1.6', - 'logdata': [], - 'message': '', - 'remediation_status': '', - 'resourceregion': 'us-east-1', - 'remediationrole': 'SO0111-Remediate-CIS-1.2.0-1.6', - 'securitystandard': 'CIS', - 'securitystandardversion': '1.2.0', - 'standardsupported': 'True', - 'status': 'ACTIVE' + "accountid": "111111111111", + "automationdocid": "ASR-CIS_1.2.0_1.6", + "controlid": "1.6", + "logdata": [], + "message": "", + "remediation_status": 
"", + "resourceregion": "us-east-1", + "remediationrole": "SO0111-Remediate-CIS-1.2.0-1.6", + "securitystandard": "CIS", + "securitystandardversion": "1.2.0", + "standardsupported": "True", + "status": "ACTIVE", } - ssm = botocore.session.get_session().create_client('ssm', config=BOTO_CONFIG) + ssm = botocore.session.get_session().create_client("ssm", config=BOTO_CONFIG) ssm_stubber = Stubber(ssm) ssm_stubber.add_response( - 'get_parameter', + "get_parameter", { "Parameter": { "Name": "/Solutions/SO0111/cis-aws-foundations-benchmark/1.2.0/shortname", @@ -637,20 +603,16 @@ def test_alt_workflow_with_role(mocker): "Version": 1, "LastModifiedDate": "2021-05-11T08:21:43.794000-04:00", "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/aws-foundational-security-best-practices/1.2.0/shortname", - "DataType": "text" + "DataType": "text", } - },{ - "Name": "/Solutions/SO0111/cis-aws-foundations-benchmark/1.2.0/shortname" - } + }, + {"Name": "/Solutions/SO0111/cis-aws-foundations-benchmark/1.2.0/shortname"}, ) - ssm_stubber.add_client_error( - 'get_parameter', - 'ParameterNotFound' - ) + ssm_stubber.add_client_error("get_parameter", "ParameterNotFound") ssm_stubber.add_response( - 'get_parameter', + "get_parameter", { "Parameter": { "Name": "/Solutions/SO0111/cis-aws-foundations-benchmark/1.2.0", @@ -659,19 +621,16 @@ def test_alt_workflow_with_role(mocker): "Version": 1, "LastModifiedDate": "2021-05-11T08:21:44.632000-04:00", "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0", - "DataType": "text" + "DataType": "text", } - } + }, ) - ssm_stubber.add_response( - 'describe_document', - workflow_doc() - ) + ssm_stubber.add_response("describe_document", workflow_doc()) ssm_stubber.activate() - mocker.patch('check_ssm_doc_state._get_ssm_client', return_value=ssm) - mocker.patch('sechub_findings.get_ssm_connection', return_value=ssm) + mocker.patch("check_ssm_doc_state._get_ssm_client", 
return_value=ssm) + mocker.patch("layer.sechub_findings.get_ssm_connection", return_value=ssm) result = lambda_handler(test_input, {}) diff --git a/source/Orchestrator/test/test_check_ssm_execution.py b/source/Orchestrator/test/test_check_ssm_execution.py index 328b6191..6e0abb77 100644 --- a/source/Orchestrator/test/test_check_ssm_execution.py +++ b/source/Orchestrator/test/test_check_ssm_execution.py @@ -4,19 +4,21 @@ Unit Test: check_ssm_execution.py Run from /deployment/temp/source/Orchestrator after running build-s3-dist.sh """ - import os -import pytest +from typing import Any + import boto3 -from botocore.stub import Stubber, ANY -from check_ssm_execution import lambda_handler, AutomationExecution -from awsapi_cached_client import AWSCachedClient -from pytest_mock import mocker +import pytest +from botocore.stub import ANY, Stubber +from check_ssm_execution import AutomationExecution, lambda_handler +from layer.awsapi_cached_client import AWSCachedClient + def get_region(): - return os.getenv('AWS_DEFAULT_REGION') + return os.getenv("AWS_DEFAULT_REGION") -test_event = { + +test_event: Any = { "EventType": "Security Hub Findings - Custom Action", "Finding": { "SchemaVersion": "2018-10-08", @@ -35,14 +37,14 @@ def get_region(): "Product": 0, "Label": "INFORMATIONAL", "Normalized": 0, - "Original": "INFORMATIONAL" + "Original": "INFORMATIONAL", }, "Title": "AutoScaling.1 Auto scaling groups associated with a load balancer should use load balancer health checks", "Description": "This control checks whether your Auto Scaling groups that are associated with a load balancer are using Elastic Load Balancing health checks.", "Remediation": { "Recommendation": { "Text": "For directions on how to fix this issue, please consult the AWS Security Hub Foundational Security Best Practices documentation.", - "Url": "https://docs.aws.amazon.com/console/securityhub/AutoScaling.1/remediation" + "Url": "https://docs.aws.amazon.com/console/securityhub/AutoScaling.1/remediation", 
} }, "ProductFields": { @@ -56,14 +58,14 @@ def get_region(): "aws/securityhub/ProductName": "Security Hub", "aws/securityhub/CompanyName": "AWS", "aws/securityhub/annotation": "AWS Config evaluated your resources against the rule. The rule did not apply to the AWS resources in its scope, the specified resources were deleted, or the evaluation results were deleted.", - "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-1::product/aws/securityhub/arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0/AutoScaling.1/finding/635ceb5d-3dfd-4458-804e-48a42cd723e4" + "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-1::product/aws/securityhub/arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0/AutoScaling.1/finding/635ceb5d-3dfd-4458-804e-48a42cd723e4", }, "Resources": [ { "Type": "AwsAccount", "Id": "arn:aws:autoscaling:us-east-1:111111111111:autoScalingGroup:785df3481e1-cd66-435d-96de-d6ed5416defd:autoScalingGroupName/sharr-test-autoscaling-1", "Partition": "aws", - "Region": "us-east-1" + "Region": "us-east-1", } ], "Compliance": { @@ -71,33 +73,31 @@ def get_region(): "StatusReasons": [ { "ReasonCode": "CONFIG_EVALUATIONS_EMPTY", - "Description": "AWS Config evaluated your resources against the rule. The rule did not apply to the AWS resources in its scope, the specified resources were deleted, or the evaluation results were deleted." + "Description": "AWS Config evaluated your resources against the rule. 
The rule did not apply to the AWS resources in its scope, the specified resources were deleted, or the evaluation results were deleted.", } - ] + ], }, "WorkflowState": "NEW", - "Workflow": { - "Status": "NEW" - }, - "RecordState": "ACTIVE" + "Workflow": {"Status": "NEW"}, + "RecordState": "ACTIVE", }, "AutomationDocument": { "DocState": "ACTIVE", "SecurityStandardVersion": "1.0.0", "AccountId": "111111111111", - "Message": "Document Status is not \"Active\": unknown", + "Message": 'Document Status is not "Active": unknown', "AutomationDocId": "SHARR-AFSBP_1.0.0_AutoScaling.1", "RemediationRole": "SO0111-Remediate-AFSBP-1.0.0-AutoScaling.1", "ControlId": "AutoScaling.1", "SecurityStandard": "AFSBP", - "SecurityStandardSupported": "True" + "SecurityStandardSupported": "True", }, "SSMExecution": { "Message": "AutoScaling.1remediation was successfully invoked via AWS Systems Manager in account 111111111111: 43374019-a309-4627-b8a2-c641e0140262", "ExecId": "43374019-a309-4627-b8a2-c641e0140262", "ExecState": "SUCCESS", "Account": "111111111111", - "Region": "us-east-1" + "Region": "us-east-1", }, "Remediation": { "LogData": [], @@ -105,8 +105,8 @@ def get_region(): "ExecId": "43374019-a309-4627-b8a2-c641e0140262", "Message": "Waiting for completion", "AffectedObject": "", - "ExecState": "InProgress" - } + "ExecState": "InProgress", + }, } ssm_mocked_failed_response = { @@ -126,16 +126,13 @@ def get_region(): ], "Remediation.Output": [ "No output available yet because the step is not successfully executed" - ] + ], }, "Mode": "Auto", "FailureMessage": "Step fails when it is Poll action status for completion. Traceback (most recent call last):\n File \"/tmp/5a927c4c-3d51-4915-8d7e-82fc4c61e479-2021-05-24-14-57-35/customer_script.py\", line 4, in parse_event\n my_control_id = event['expected_control_id']\n\nKeyError - 'expected_control_id'. 
Please refer to Automation Service Troubleshooting Guide for more diagnosis details.", "Targets": [], - "ResolvedTargets": { - "ParameterValues": [], - "Truncated": False - }, - "AutomationType": "Local" + "ResolvedTargets": {"ParameterValues": [], "Truncated": False}, + "AutomationType": "Local", } ] } @@ -152,257 +149,268 @@ def get_region(): "LogFile": "", "Outputs": { "ModifyAccount.EnableEbsEncryptionByDefaultResponse": [ - "{\"EbsEncryptionByDefault\":true,\"ResponseMetadata\":{\"RequestId\":\"c45a9839-5a40-472e-ac83-d0058987948c\",\"HTTPStatusCode\":200,\"HTTPHeaders\":{\"x-amzn-requestid\":\"c45a9839-5a40-472e-ac83-d0058987948c\",\"cache-control\":\"no-cache, no-store\",\"strict-transport-security\":\"max-age\\u003d31536000; includeSubDomains\",\"content-type\":\"text/xml;charset\\u003dUTF-8\",\"transfer-encoding\":\"chunked\",\"vary\":\"accept-encoding\",\"date\":\"Thu, 06 May 2021 19:16:14 GMT\",\"server\":\"AmazonEC2\"},\"RetryAttempts\":0}}" + '{"EbsEncryptionByDefault":true,"ResponseMetadata":{"RequestId":"c45a9839-5a40-472e-ac83-d0058987948c","HTTPStatusCode":200,"HTTPHeaders":{"x-amzn-requestid":"c45a9839-5a40-472e-ac83-d0058987948c","cache-control":"no-cache, no-store","strict-transport-security":"max-age\\u003d31536000; includeSubDomains","content-type":"text/xml;charset\\u003dUTF-8","transfer-encoding":"chunked","vary":"accept-encoding","date":"Thu, 06 May 2021 19:16:14 GMT","server":"AmazonEC2"},"RetryAttempts":0}}' ] }, "Mode": "Auto", "ParentAutomationExecutionId": "795cf453-c41a-48df-aace-fd68fdace188", "Targets": [], - "ResolvedTargets": { - "ParameterValues": [], - "Truncated": False - }, - "AutomationType": "Local" + "ResolvedTargets": {"ParameterValues": [], "Truncated": False}, + "AutomationType": "Local", } ] } + def test_failed_remediation(mocker): """ Verifies correct operation when a child remediation fails """ AWS = AWSCachedClient(get_region()) - account = '111111111111' - test_event['AutomationDocument']['AccountId'] = account - 
ssm_c = AWS.get_connection('ssm') + account = "111111111111" + test_event["AutomationDocument"]["AccountId"] = account + ssm_c = AWS.get_connection("ssm") expected_result = { - 'affected_object': 'No output available yet because the step is not successfully executed', - 'executionid': '43374019-a309-4627-b8a2-c641e0140262', - 'logdata': ANY, - 'remediation_status': 'Failed', - 'status': 'Failed', - 'message': 'See Automation Execution output for details' - } + "affected_object": "No output available yet because the step is not successfully executed", + "executionid": "43374019-a309-4627-b8a2-c641e0140262", + "logdata": ANY, + "remediation_status": "Failed", + "status": "Failed", + "message": "See Automation Execution output for details", + } ssmc_stub = Stubber(ssm_c) + ssmc_stub.add_response("get_parameter", {}) ssmc_stub.add_response( - 'get_parameter', - {} - ) - ssmc_stub.add_response( - 'describe_automation_executions', + "describe_automation_executions", ssm_mocked_failed_response, - {'Filters': [{'Key': 'ExecutionId', 'Values': ['43374019-a309-4627-b8a2-c641e0140262']}]} + { + "Filters": [ + { + "Key": "ExecutionId", + "Values": ["43374019-a309-4627-b8a2-c641e0140262"], + } + ] + }, ) ssmc_stub.activate() - mocker.patch('check_ssm_execution._get_ssm_client', return_value=ssm_c) + mocker.patch("check_ssm_execution._get_ssm_client", return_value=ssm_c) response = lambda_handler(test_event, {}) assert response == expected_result ssmc_stub.deactivate() + def test_successful_remediation(mocker): """ Verifies correct operation for successful remediation """ - ssm_c = boto3.client('ssm') - account = '111111111111' - test_event['AutomationDocument']['AccountId'] = account - test_event['SSMExecution']['ExecId'] = '5f12697a-70a5-4a64-83e6-b7d429ec2b17' + ssm_c = boto3.client("ssm") + account = "111111111111" + test_event["AutomationDocument"]["AccountId"] = account + test_event["SSMExecution"]["ExecId"] = "5f12697a-70a5-4a64-83e6-b7d429ec2b17" expected_result = { - 
'affected_object': 'UNKNOWN', - 'executionid': '5f12697a-70a5-4a64-83e6-b7d429ec2b17', - 'logdata': '[]', - 'message': '{"ModifyAccount.EnableEbsEncryptionByDefaultResponse": ["{\\"EbsEncryptionByDefault\\":true,\\"ResponseMetadata\\":{\\"RequestId\\":\\"c45a9839-5a40-472e-ac83-d0058987948c\\",\\"HTTPStatusCode\\":200,\\"HTTPHeaders\\":{\\"x-amzn-requestid\\":\\"c45a9839-5a40-472e-ac83-d0058987948c\\",\\"cache-control\\":\\"no-cache, no-store\\",\\"strict-transport-security\\":\\"max-age\\\\u003d31536000; includeSubDomains\\",\\"content-type\\":\\"text/xml;charset\\\\u003dUTF-8\\",\\"transfer-encoding\\":\\"chunked\\",\\"vary\\":\\"accept-encoding\\",\\"date\\":\\"Thu, 06 May 2021 19:16:14 GMT\\",\\"server\\":\\"AmazonEC2\\"},\\"RetryAttempts\\":0}}"]}', - 'remediation_status': 'Success', - 'status': 'Success' + "affected_object": "UNKNOWN", + "executionid": "5f12697a-70a5-4a64-83e6-b7d429ec2b17", + "logdata": "[]", + "message": '{"ModifyAccount.EnableEbsEncryptionByDefaultResponse": ["{\\"EbsEncryptionByDefault\\":true,\\"ResponseMetadata\\":{\\"RequestId\\":\\"c45a9839-5a40-472e-ac83-d0058987948c\\",\\"HTTPStatusCode\\":200,\\"HTTPHeaders\\":{\\"x-amzn-requestid\\":\\"c45a9839-5a40-472e-ac83-d0058987948c\\",\\"cache-control\\":\\"no-cache, no-store\\",\\"strict-transport-security\\":\\"max-age\\\\u003d31536000; includeSubDomains\\",\\"content-type\\":\\"text/xml;charset\\\\u003dUTF-8\\",\\"transfer-encoding\\":\\"chunked\\",\\"vary\\":\\"accept-encoding\\",\\"date\\":\\"Thu, 06 May 2021 19:16:14 GMT\\",\\"server\\":\\"AmazonEC2\\"},\\"RetryAttempts\\":0}}"]}', + "remediation_status": "Success", + "status": "Success", } ssmc_stub = Stubber(ssm_c) ssmc_stub.add_response( - 'describe_automation_executions', + "describe_automation_executions", ssm_mocked_good_response, - {'Filters': [{'Key': 'ExecutionId', 'Values': ['5f12697a-70a5-4a64-83e6-b7d429ec2b17']}]} + { + "Filters": [ + { + "Key": "ExecutionId", + "Values": ["5f12697a-70a5-4a64-83e6-b7d429ec2b17"], + } + ] 
+ }, ) ssmc_stub.activate() - mocker.patch('check_ssm_execution._get_ssm_client', return_value=ssm_c) - mocker.patch('check_ssm_execution.Metrics.send_metrics', return_value=False) - mocker.patch('check_ssm_execution.Metrics.get_metrics_from_finding', return_value=False) - mocker.patch('check_ssm_execution.Metrics.__init__', return_value=None) + mocker.patch("check_ssm_execution._get_ssm_client", return_value=ssm_c) + mocker.patch("check_ssm_execution.Metrics.send_metrics", return_value=False) + mocker.patch( + "check_ssm_execution.Metrics.get_metrics_from_finding", return_value=False + ) + mocker.patch("check_ssm_execution.Metrics.__init__", return_value=None) response = lambda_handler(test_event, {}) assert response == expected_result ssmc_stub.deactivate() + def test_execid_parsing_nonsharr(mocker): """ Verifies correct operation for successful remediation """ - ssm_c = boto3.client('ssm') - account = '111111111111' - test_event['AutomationDocument']['AccountId'] = account - test_event['SSMExecution']['ExecId'] = '5f12697a-70a5-4a64-83e6-b7d429ec2b17' - - expected_result = { - 'affected_object': 'UNKNOWN', - 'executionid': '5f12697a-70a5-4a64-83e6-b7d429ec2b17', - 'logdata': '[]', - 'message': '{"ModifyAccount.EnableEbsEncryptionByDefaultResponse": ["{\\"EbsEncryptionByDefault\\":true,\\"ResponseMetadata\\":{\\"RequestId\\":\\"c45a9839-5a40-472e-ac83-d0058987948c\\",\\"HTTPStatusCode\\":200,\\"HTTPHeaders\\":{\\"x-amzn-requestid\\":\\"c45a9839-5a40-472e-ac83-d0058987948c\\",\\"cache-control\\":\\"no-cache, no-store\\",\\"strict-transport-security\\":\\"max-age\\\\u003d31536000; includeSubDomains\\",\\"content-type\\":\\"text/xml;charset\\\\u003dUTF-8\\",\\"transfer-encoding\\":\\"chunked\\",\\"vary\\":\\"accept-encoding\\",\\"date\\":\\"Thu, 06 May 2021 19:16:14 GMT\\",\\"server\\":\\"AmazonEC2\\"},\\"RetryAttempts\\":0}}"]}', - 'remediation_status': 'Success', - 'status': 'Success' - } + ssm_c = boto3.client("ssm") + account = "111111111111" + 
test_event["AutomationDocument"]["AccountId"] = account + test_event["SSMExecution"]["ExecId"] = "5f12697a-70a5-4a64-83e6-b7d429ec2b17" ssmc_stub = Stubber(ssm_c) ssmc_stub.add_response( - 'describe_automation_executions', + "describe_automation_executions", ssm_mocked_good_response, - {'Filters': [{'Key': 'ExecutionId', 'Values': ['5f12697a-70a5-4a64-83e6-b7d429ec2b17']}]} + { + "Filters": [ + { + "Key": "ExecutionId", + "Values": ["5f12697a-70a5-4a64-83e6-b7d429ec2b17"], + } + ] + }, ) ssmc_stub.activate() - mocker.patch('check_ssm_execution._get_ssm_client', return_value=ssm_c) + mocker.patch("check_ssm_execution._get_ssm_client", return_value=ssm_c) automation_exec_info = AutomationExecution( - test_event['SSMExecution']['ExecId'], - account, - 'foo-bar-baz', - 'us-east-1' - ) - assert automation_exec_info.status == 'Success' - assert automation_exec_info.outputs == ssm_mocked_good_response['AutomationExecutionMetadataList'][0]['Outputs'] - assert automation_exec_info.failure_message == '' + test_event["SSMExecution"]["ExecId"], account, "foo-bar-baz", "us-east-1" + ) + assert automation_exec_info.status == "Success" + assert ( + automation_exec_info.outputs + == ssm_mocked_good_response["AutomationExecutionMetadataList"][0]["Outputs"] + ) + assert automation_exec_info.failure_message == "" assert automation_exec_info.account == account - assert automation_exec_info.region == 'us-east-1' + assert automation_exec_info.region == "us-east-1" + def test_execid_parsing_sharr(mocker): """ Verifies correct operation for successful remediation """ - automation_exec_info = { - "AutomationExecutionId": "795cf453-c41a-48df-aace-fd68fdace188", - "DocumentName": "SHARR_Remediation_AFSBP_1.0.0_EC2.7", - "DocumentVersion": "1", - "AutomationExecutionStatus": "Success", - "ExecutionStartTime": "2021-05-06T15:16:06.718000-04:00", - "ExecutionEndTime": "2021-05-06T15:16:37.083000-04:00", - "ExecutedBy": 
"arn:aws:sts::111111111111:assumed-role/SO0111-SHARR-Orchestrator-Member_us-east-1/sechub_admin", - "LogFile": "", - "Outputs": { - "ParseInput.AffectedObject": [ - "{\"Type\":\"EBSEncryption\",\"Id\":\"EBSEncryptionByDefault\",\"OutputKey\":\"Remediation.Output\"}" - ], - "Remediation.Output": [ - "{\"message\":\"Encryption by default enabled on EBS\",\"status\":\"Success\"}" - ] - }, - "Mode": "Auto", - "Targets": [], - "ResolvedTargets": { - "ParameterValues": [], - "Truncated": False - }, - "AutomationType": "Local" - } - ssm_c = boto3.client('ssm') - account = '111111111111' - test_event['AutomationDocument']['AccountId'] = account - test_event['SSMExecution']['ExecId'] = '795cf453-c41a-48df-aace-fd68fdace188' - - expected_result = { - 'affected_object': 'UNKNOWN', - 'executionid': '795cf453-c41a-48df-aace-fd68fdace188', - 'logdata': '[]', - 'message': '{"ModifyAccount.EnableEbsEncryptionByDefaultResponse": ["{\\"EbsEncryptionByDefault\\":true,\\"ResponseMetadata\\":{\\"RequestId\\":\\"c45a9839-5a40-472e-ac83-d0058987948c\\",\\"HTTPStatusCode\\":200,\\"HTTPHeaders\\":{\\"x-amzn-requestid\\":\\"c45a9839-5a40-472e-ac83-d0058987948c\\",\\"cache-control\\":\\"no-cache, no-store\\",\\"strict-transport-security\\":\\"max-age\\\\u003d31536000; includeSubDomains\\",\\"content-type\\":\\"text/xml;charset\\\\u003dUTF-8\\",\\"transfer-encoding\\":\\"chunked\\",\\"vary\\":\\"accept-encoding\\",\\"date\\":\\"Thu, 06 May 2021 19:16:14 GMT\\",\\"server\\":\\"AmazonEC2\\"},\\"RetryAttempts\\":0}}"]}', - 'remediation_status': 'Success', - 'status': 'Success' - } + ssm_c = boto3.client("ssm") + account = "111111111111" + test_event["AutomationDocument"]["AccountId"] = account + test_event["SSMExecution"]["ExecId"] = "795cf453-c41a-48df-aace-fd68fdace188" ssmc_stub = Stubber(ssm_c) ssmc_stub.add_response( - 'describe_automation_executions', + "describe_automation_executions", ssm_mocked_good_response, - {'Filters': [{'Key': 'ExecutionId', 'Values': 
['795cf453-c41a-48df-aace-fd68fdace188']}]} + { + "Filters": [ + { + "Key": "ExecutionId", + "Values": ["795cf453-c41a-48df-aace-fd68fdace188"], + } + ] + }, ) ssmc_stub.activate() - mocker.patch('check_ssm_execution._get_ssm_client', return_value=ssm_c) + mocker.patch("check_ssm_execution._get_ssm_client", return_value=ssm_c) automation_exec_info = AutomationExecution( - test_event['SSMExecution']['ExecId'], - account, - 'foo-bar-baz', - 'us-east-1' - ) - assert automation_exec_info.status == 'Success' - assert automation_exec_info.outputs == ssm_mocked_good_response['AutomationExecutionMetadataList'][0]['Outputs'] - assert automation_exec_info.failure_message == '' + test_event["SSMExecution"]["ExecId"], account, "foo-bar-baz", "us-east-1" + ) + assert automation_exec_info.status == "Success" + assert ( + automation_exec_info.outputs + == ssm_mocked_good_response["AutomationExecutionMetadataList"][0]["Outputs"] + ) + assert automation_exec_info.failure_message == "" assert automation_exec_info.account == account - assert automation_exec_info.region == 'us-east-1' + assert automation_exec_info.region == "us-east-1" ssmc_stub.deactivate() ssmc_stub.deactivate() + def test_missing_account_id(mocker): """ Verifies that system exit occurs when an account ID is missing from event """ - ssm_c = boto3.client('ssm') - test_event['SSMExecution']['ExecId'] = '5f12697a-70a5-4a64-83e6-b7d429ec2b17' - test_event['SSMExecution']['Account'] = None + ssm_c = boto3.client("ssm") + test_event["SSMExecution"]["ExecId"] = "5f12697a-70a5-4a64-83e6-b7d429ec2b17" + test_event["SSMExecution"]["Account"] = None ssmc_stub = Stubber(ssm_c) ssmc_stub.add_response( - 'describe_automation_executions', + "describe_automation_executions", ssm_mocked_good_response, - {'Filters': [{'Key': 'ExecutionId', 'Values': ['5f12697a-70a5-4a64-83e6-b7d429ec2b17']}]} + { + "Filters": [ + { + "Key": "ExecutionId", + "Values": ["5f12697a-70a5-4a64-83e6-b7d429ec2b17"], + } + ] + }, ) ssmc_stub.activate() - 
mocker.patch('check_ssm_execution._get_ssm_client', return_value=ssm_c) - mocker.patch('check_ssm_execution.Metrics.send_metrics', return_value=False) - mocker.patch('check_ssm_execution.Metrics.get_metrics_from_finding', return_value=False) - mocker.patch('check_ssm_execution.Metrics.__init__', return_value=None) + mocker.patch("check_ssm_execution._get_ssm_client", return_value=ssm_c) + mocker.patch("check_ssm_execution.Metrics.send_metrics", return_value=False) + mocker.patch( + "check_ssm_execution.Metrics.get_metrics_from_finding", return_value=False + ) + mocker.patch("check_ssm_execution.Metrics.__init__", return_value=None) with pytest.raises(SystemExit) as response: lambda_handler(test_event, {}) - assert response.value.code == 'ERROR: missing remediation account information. SSMExecution missing region or account.' + assert ( + response.value.code + == "ERROR: missing remediation account information. SSMExecution missing region or account." + ) ssmc_stub.deactivate() + def test_missing_region(mocker): """ Verifies that system exit occurs when region is missing """ - ssm_c = boto3.client('ssm') - test_event['SSMExecution']['ExecId'] = '5f12697a-70a5-4a64-83e6-b7d429ec2b17' - test_event['SSMExecution']['Region'] = None + ssm_c = boto3.client("ssm") + test_event["SSMExecution"]["ExecId"] = "5f12697a-70a5-4a64-83e6-b7d429ec2b17" + test_event["SSMExecution"]["Region"] = None ssmc_stub = Stubber(ssm_c) ssmc_stub.add_response( - 'describe_automation_executions', + "describe_automation_executions", ssm_mocked_good_response, - {'Filters': [{'Key': 'ExecutionId', 'Values': ['5f12697a-70a5-4a64-83e6-b7d429ec2b17']}]} + { + "Filters": [ + { + "Key": "ExecutionId", + "Values": ["5f12697a-70a5-4a64-83e6-b7d429ec2b17"], + } + ] + }, ) ssmc_stub.activate() - mocker.patch('check_ssm_execution._get_ssm_client', return_value=ssm_c) - mocker.patch('check_ssm_execution.Metrics.send_metrics', return_value=False) - 
mocker.patch('check_ssm_execution.Metrics.get_metrics_from_finding', return_value=False) - mocker.patch('check_ssm_execution.Metrics.__init__', return_value=None) + mocker.patch("check_ssm_execution._get_ssm_client", return_value=ssm_c) + mocker.patch("check_ssm_execution.Metrics.send_metrics", return_value=False) + mocker.patch( + "check_ssm_execution.Metrics.get_metrics_from_finding", return_value=False + ) + mocker.patch("check_ssm_execution.Metrics.__init__", return_value=None) with pytest.raises(SystemExit) as response: lambda_handler(test_event, {}) - assert response.value.code == 'ERROR: missing remediation account information. SSMExecution missing region or account.' + assert ( + response.value.code + == "ERROR: missing remediation account information. SSMExecution missing region or account." + ) - ssmc_stub.deactivate() \ No newline at end of file + ssmc_stub.deactivate() diff --git a/source/Orchestrator/test/test_data/notifier.json b/source/Orchestrator/test/test_data/notifier.json index 32a1a487..a8fd7894 100644 --- a/source/Orchestrator/test/test_data/notifier.json +++ b/source/Orchestrator/test/test_data/notifier.json @@ -3,7 +3,7 @@ "Details": [ "{\"ExecutionLog\":\"Snapshot snap-00004115408c25b92 permissions set to private\\nSnapshot snap-00003bd4b6a40277c permissions set to private\\n\",\"Payload\":[\"snap-00004115408c25b92\",\"snap-00003bd4b6a40277c\"]}" ], - "Message": "Remediation succeeded for AFSBP control EC2.1 in account 111111111111: Remediation for ['111111111111'] status: Success", + "Message": "Remediation succeeded for AWS FSBP control EC2.1 in account 111111111111: Remediation for ['111111111111'] status: Success", "State": "SUCCESS" }, "Metrics": { diff --git a/source/Orchestrator/test/test_exec_ssm_doc.py b/source/Orchestrator/test/test_exec_ssm_doc.py index 1d7815ca..2beb14a4 100644 --- a/source/Orchestrator/test/test_exec_ssm_doc.py +++ b/source/Orchestrator/test/test_exec_ssm_doc.py @@ -4,20 +4,18 @@ Unit Test: exec_ssm_doc.py Run 
from /deployment/temp/source/Orchestrator after running build-s3-dist.sh """ +from typing import Any -import os -import pytest import boto3 -from botocore.stub import Stubber, ANY +from botocore.stub import ANY, Stubber from exec_ssm_doc import lambda_handler -from pytest_mock import mocker def test_exec_runbook(mocker): """ Verifies correct operation on success """ - step_input = { + step_input: dict[str, Any] = { "EventType": "Security Hub Findings - Custom Action", "Finding": { "SchemaVersion": "2018-10-08", diff --git a/source/Orchestrator/test/test_get_approval_requirement.py b/source/Orchestrator/test/test_get_approval_requirement.py index 723b43e2..0d7ae9fd 100644 --- a/source/Orchestrator/test/test_get_approval_requirement.py +++ b/source/Orchestrator/test/test_get_approval_requirement.py @@ -4,21 +4,24 @@ Unit Test: exec_ssm_doc.py Run from /deployment/temp/source/Orchestrator after running build-s3-dist.sh """ - import os + import pytest -import boto3 -from botocore.stub import Stubber, ANY -from get_approval_requirement import lambda_handler, get_running_account -from awsapi_cached_client import AWSCachedClient -from pytest_mock import mocker +from botocore.stub import Stubber +from get_approval_requirement import lambda_handler +from layer.awsapi_cached_client import AWSCachedClient + def get_region(): - return os.getenv('AWS_DEFAULT_REGION') + return os.getenv("AWS_DEFAULT_REGION") + @pytest.fixture(autouse=True) def mock_get_running_account(mocker): - mocker.patch('get_approval_requirement.get_running_account', return_value='111111111111') + mocker.patch( + "get_approval_requirement.get_running_account", return_value="111111111111" + ) + def step_input(): return { @@ -40,14 +43,14 @@ def step_input(): "Product": 0, "Label": "INFORMATIONAL", "Normalized": 0, - "Original": "INFORMATIONAL" + "Original": "INFORMATIONAL", }, "Title": "AutoScaling.1 Auto scaling groups associated with a load balancer should use load balancer health checks", "Description": 
"This control checks whether your Auto Scaling groups that are associated with a load balancer are using Elastic Load Balancing health checks.", "Remediation": { "Recommendation": { "Text": "For directions on how to fix this issue, please consult the AWS Security Hub Foundational Security Best Practices documentation.", - "Url": "https://docs.aws.amazon.com/console/securityhub/AutoScaling.1/remediation" + "Url": "https://docs.aws.amazon.com/console/securityhub/AutoScaling.1/remediation", } }, "ProductFields": { @@ -61,14 +64,14 @@ def step_input(): "aws/securityhub/ProductName": "Security Hub", "aws/securityhub/CompanyName": "AWS", "aws/securityhub/annotation": "AWS Config evaluated your resources against the rule. The rule did not apply to the AWS resources in its scope, the specified resources were deleted, or the evaluation results were deleted.", - "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-1::product/aws/securityhub/arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0/AutoScaling.1/finding/635ceb5d-3dfd-4458-804e-48a42cd723e4" + "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-1::product/aws/securityhub/arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0/AutoScaling.1/finding/635ceb5d-3dfd-4458-804e-48a42cd723e4", }, "Resources": [ { "Type": "AwsAccount", "Id": "arn:aws:autoscaling:us-east-1:111111111111:autoScalingGroup:785df3481e1-cd66-435d-96de-d6ed5416defd:autoScalingGroupName/sharr-test-autoscaling-1", "Partition": "aws", - "Region": "us-east-1" + "Region": "us-east-1", } ], "Compliance": { @@ -76,53 +79,94 @@ def step_input(): "StatusReasons": [ { "ReasonCode": "CONFIG_EVALUATIONS_EMPTY", - "Description": "AWS Config evaluated your resources against the rule. The rule did not apply to the AWS resources in its scope, the specified resources were deleted, or the evaluation results were deleted." 
+ "Description": "AWS Config evaluated your resources against the rule. The rule did not apply to the AWS resources in its scope, the specified resources were deleted, or the evaluation results were deleted.", } - ] + ], }, "WorkflowState": "NEW", - "Workflow": { - "Status": "NEW" - }, - "RecordState": "ACTIVE" + "Workflow": {"Status": "NEW"}, + "RecordState": "ACTIVE", }, "AutomationDocument": { "DocState": "ACTIVE", "SecurityStandardVersion": "1.0.0", "AccountId": "111111111111", - "Message": "Document Status is not \"Active\": unknown", + "Message": 'Document Status is not "Active": unknown', "AutomationDocId": "ASR-AFSBP_1.0.0_AutoScaling.1", "RemediationRole": "SO0111-Remediate-AFSBP-1.0.0-AutoScaling.1", "ControlId": "AutoScaling.1", "SecurityStandard": "AFSBP", - "SecurityStandardSupported": "True" + "SecurityStandardSupported": "True", }, } + +def step_input_config(): + return { + "EventType": "Security Hub Findings - Custom Action", + "Finding": { + "ProductArn": "arn:aws:securityhub:us-east-1::product/aws/config", + "Types": ["Software and Configuration Checks"], + "Description": "This finding is created for a resource compliance change for config rule: test-config-rule-policy", + "SchemaVersion": "2018-10-08", + "Compliance": {"Status": "FAILED"}, + "GeneratorId": "arn:aws:config:us-east-1:111111111111:config-rule/config-rule-k5r9xw", + "CreatedAt": "2023-10-26T20:26:06.736Z", + "RecordState": "ACTIVE", + "Title": "ConfigRuleName", + "Workflow": {"Status": "NOTIFIED"}, + "Severity": {"Normalized": 40, "Label": "MEDIUM"}, + "UpdatedAt": "2023-10-26T20:26:06.736Z", + "CompanyName": "AWS", + "FindingProviderFields": { + "Types": ["Software and Configuration Checks"], + "Severity": {"Normalized": 40, "Label": "MEDIUM"}, + }, + "WorkflowState": "NEW", + "ProductFields": { + "aws/securityhub/ProductName": "Config", + "aws/securityhub/CompanyName": "AWS", + "aws/securityhub/FindingId": 
"arn:aws:securityhub:us-east-1::product/aws/config/arn:aws:config:us-east-1:111111111111:config-rule/config-rule-k5r9xw/finding/3027db7f9b58b5ff20354bc654f0ad706cf70d1a", + "aws/config/ConfigRuleArn": "arn:aws:config:us-east-1:111111111111:config-rule/config-rule-k5r9xw", + "aws/config/ConfigRuleName": "test-config-rule-policy", + "aws/config/ConfigComplianceType": "NON_COMPLIANT", + }, + "AwsAccountId": "111111111111", + "Region": "us-east-1", + "Id": "arn:aws:config:us-east-1:111111111111:config-rule/config-rule-k5r9xw/finding/3027db7f9b58b5ff20354bc654f0ad706cf70d1a", + "Resources": [ + { + "Partition": "aws", + "Type": "Other", + "Region": "us-east-1", + "Id": "AWS::::Account:111111111111", + } + ], + }, + } + + def test_get_approval_req(mocker): """ Verifies that it returns the fanout runbook name """ - os.environ['WORKFLOW_RUNBOOK'] = 'ASR-RunWorkflow' - os.environ['WORKFLOW_RUNBOOK_ACCOUNT'] = 'member' + os.environ["WORKFLOW_RUNBOOK"] = "ASR-RunWorkflow" + os.environ["WORKFLOW_RUNBOOK_ACCOUNT"] = "member" expected_result = { - 'workflowdoc': "ASR-RunWorkflow", - 'workflowaccount': '111111111111', - 'workflowrole': '', - 'workflow_data': { - 'impact': 'nondestructive', - 'approvalrequired': 'false' - } + "workflowdoc": "ASR-RunWorkflow", + "workflowaccount": "111111111111", + "workflowrole": "", + "workflow_data": {"impact": "nondestructive", "approvalrequired": "false"}, } AWS = AWSCachedClient(get_region()) - account = '111111111111' - step_input()['AutomationDocument']['AccountId'] = account + account = "111111111111" + step_input()["AutomationDocument"]["AccountId"] = account - ssm_c = AWS.get_connection('ssm') + ssm_c = AWS.get_connection("ssm") ssmc_stub = Stubber(ssm_c) ssmc_stub.add_response( - 'get_parameter', + "get_parameter", { "Parameter": { "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname", @@ -131,18 +175,16 @@ def test_get_approval_req(mocker): "Version": 1, "LastModifiedDate": 
"2021-05-11T08:21:43.794000-04:00", "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname", - "DataType": "text" + "DataType": "text", } - },{ + }, + { "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname" - } - ) - ssmc_stub.add_client_error( - 'get_parameter', - 'ParameterNotFound' + }, ) + ssmc_stub.add_client_error("get_parameter", "ParameterNotFound") ssmc_stub.add_response( - 'get_parameter', + "get_parameter", { "Parameter": { "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0", @@ -151,12 +193,12 @@ def test_get_approval_req(mocker): "Version": 1, "LastModifiedDate": "2021-05-11T08:21:44.632000-04:00", "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0", - "DataType": "text" + "DataType": "text", } - } + }, ) ssmc_stub.add_response( - 'describe_document', + "describe_document", { "Document": { "Hash": "be480c5a8771035918c439a0c76e1471306a699b7f275fe7e0bea70903dc569a", @@ -172,66 +214,59 @@ def test_get_approval_req(mocker): "Name": "AutomationAssumeRole", "Type": "String", "Description": "(Optional) The ARN of the role that allows Automation to perform the actions on your behalf.", - "DefaultValue": "" + "DefaultValue": "", }, { "Name": "Finding", "Type": "StringMap", - "Description": "The input from Step function for ASG1 finding" - } - ], - "PlatformTypes": [ - "Windows", - "Linux", - "MacOS" + "Description": "The input from Step function for ASG1 finding", + }, ], + "PlatformTypes": ["Windows", "Linux", "MacOS"], "DocumentType": "Automation", "SchemaVersion": "0.3", "LatestVersion": "1", "DefaultVersion": "1", "DocumentFormat": "JSON", - "Tags": [] + "Tags": [], } - },{ - "Name": "ASR-RunWorkflow" - } + }, + {"Name": "ASR-RunWorkflow"}, ) ssmc_stub.activate() - mocker.patch('get_approval_requirement._get_ssm_client', return_value=ssm_c) + 
mocker.patch("get_approval_requirement._get_ssm_client", return_value=ssm_c) response = lambda_handler(step_input(), {}) - assert response['workflow_data'] == expected_result['workflow_data'] - assert response['workflowdoc'] == expected_result['workflowdoc'] - assert response['workflowaccount'] == expected_result['workflowaccount'] - assert response['workflowrole'] == expected_result['workflowrole'] + assert response["workflow_data"] == expected_result["workflow_data"] + assert response["workflowdoc"] == expected_result["workflowdoc"] + assert response["workflowaccount"] == expected_result["workflowaccount"] + assert response["workflowrole"] == expected_result["workflowrole"] ssmc_stub.deactivate() + def test_get_approval_req_no_fanout(mocker): """ Verifies that it does not return workflow_status at all """ - os.environ['WORKFLOW_RUNBOOK'] = '' + os.environ["WORKFLOW_RUNBOOK"] = "" expected_result = { - 'workflowdoc': "", - 'workflowaccount': '', - 'workflowrole': '', - 'workflow_data': { - 'impact': 'nondestructive', - 'approvalrequired': 'false' - } + "workflowdoc": "", + "workflowaccount": "", + "workflowrole": "", + "workflow_data": {"impact": "nondestructive", "approvalrequired": "false"}, } AWS = AWSCachedClient(get_region()) - account = '111111111111' - step_input()['AutomationDocument']['AccountId'] = account + account = "111111111111" + step_input()["AutomationDocument"]["AccountId"] = account - ssm_c = AWS.get_connection('ssm') + ssm_c = AWS.get_connection("ssm") ssmc_stub = Stubber(ssm_c) ssmc_stub.add_response( - 'get_parameter', + "get_parameter", { "Parameter": { "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname", @@ -240,18 +275,16 @@ def test_get_approval_req_no_fanout(mocker): "Version": 1, "LastModifiedDate": "2021-05-11T08:21:43.794000-04:00", "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname", - "DataType": "text" + "DataType": "text", 
} - },{ + }, + { "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname" - } - ) - ssmc_stub.add_client_error( - 'get_parameter', - 'ParameterNotFound' + }, ) + ssmc_stub.add_client_error("get_parameter", "ParameterNotFound") ssmc_stub.add_response( - 'get_parameter', + "get_parameter", { "Parameter": { "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0", @@ -260,12 +293,12 @@ def test_get_approval_req_no_fanout(mocker): "Version": 1, "LastModifiedDate": "2021-05-11T08:21:44.632000-04:00", "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0", - "DataType": "text" + "DataType": "text", } - } + }, ) ssmc_stub.add_response( - 'describe_document', + "describe_document", { "Document": { "Hash": "be480c5a8771035918c439a0c76e1471306a699b7f275fe7e0bea70903dc569a", @@ -281,70 +314,63 @@ def test_get_approval_req_no_fanout(mocker): "Name": "AutomationAssumeRole", "Type": "String", "Description": "(Optional) The ARN of the role that allows Automation to perform the actions on your behalf.", - "DefaultValue": "" + "DefaultValue": "", }, { "Name": "Finding", "Type": "StringMap", - "Description": "The input from Step function for ASG1 finding" - } - ], - "PlatformTypes": [ - "Windows", - "Linux", - "MacOS" + "Description": "The input from Step function for ASG1 finding", + }, ], + "PlatformTypes": ["Windows", "Linux", "MacOS"], "DocumentType": "Automation", "SchemaVersion": "0.3", "LatestVersion": "1", "DefaultVersion": "1", "DocumentFormat": "JSON", - "Tags": [] + "Tags": [], } - },{ - "Name": "ASR-RunWorkflow" - } + }, + {"Name": "ASR-RunWorkflow"}, ) ssmc_stub.activate() - mocker.patch('get_approval_requirement._get_ssm_client', return_value=ssm_c) + mocker.patch("get_approval_requirement._get_ssm_client", return_value=ssm_c) response = lambda_handler(step_input(), {}) print(response) - assert response['workflow_data'] == expected_result['workflow_data'] - 
assert response['workflowdoc'] == expected_result['workflowdoc'] - assert response['workflowaccount'] == expected_result['workflowaccount'] - assert response['workflowrole'] == expected_result['workflowrole'] + assert response["workflow_data"] == expected_result["workflow_data"] + assert response["workflowdoc"] == expected_result["workflowdoc"] + assert response["workflowaccount"] == expected_result["workflowaccount"] + assert response["workflowrole"] == expected_result["workflowrole"] ssmc_stub.deactivate() -#================================================================================== + +# ================================================================================== def test_workflow_in_admin(mocker): """ Verifies that it returns the fanout runbook name """ - os.environ['WORKFLOW_RUNBOOK'] = 'ASR-RunWorkflow' - os.environ['WORKFLOW_RUNBOOK_ACCOUNT'] = 'admin' - os.environ['WORKFLOW_RUNBOOK_ROLE'] = 'someotheriamrole' + os.environ["WORKFLOW_RUNBOOK"] = "ASR-RunWorkflow" + os.environ["WORKFLOW_RUNBOOK_ACCOUNT"] = "admin" + os.environ["WORKFLOW_RUNBOOK_ROLE"] = "someotheriamrole" expected_result = { - 'workflowdoc': "ASR-RunWorkflow", - 'workflowaccount': '111111111111', - 'workflowrole': 'someotheriamrole', - 'workflow_data': { - 'impact': 'nondestructive', - 'approvalrequired': 'false' - } + "workflowdoc": "ASR-RunWorkflow", + "workflowaccount": "111111111111", + "workflowrole": "someotheriamrole", + "workflow_data": {"impact": "nondestructive", "approvalrequired": "false"}, } AWS = AWSCachedClient(get_region()) - account = '111111111111' - step_input()['AutomationDocument']['AccountId'] = account + account = "111111111111" + step_input()["AutomationDocument"]["AccountId"] = account - ssm_c = AWS.get_connection('ssm') + ssm_c = AWS.get_connection("ssm") ssmc_stub = Stubber(ssm_c) ssmc_stub.add_response( - 'get_parameter', + "get_parameter", { "Parameter": { "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname", @@ -353,18 
+379,16 @@ def test_workflow_in_admin(mocker): "Version": 1, "LastModifiedDate": "2021-05-11T08:21:43.794000-04:00", "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname", - "DataType": "text" + "DataType": "text", } - },{ + }, + { "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname" - } - ) - ssmc_stub.add_client_error( - 'get_parameter', - 'ParameterNotFound' + }, ) + ssmc_stub.add_client_error("get_parameter", "ParameterNotFound") ssmc_stub.add_response( - 'get_parameter', + "get_parameter", { "Parameter": { "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0", @@ -373,12 +397,12 @@ def test_workflow_in_admin(mocker): "Version": 1, "LastModifiedDate": "2021-05-11T08:21:44.632000-04:00", "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0", - "DataType": "text" + "DataType": "text", } - } + }, ) ssmc_stub.add_response( - 'describe_document', + "describe_document", { "Document": { "Hash": "be480c5a8771035918c439a0c76e1471306a699b7f275fe7e0bea70903dc569a", @@ -394,39 +418,289 @@ def test_workflow_in_admin(mocker): "Name": "AutomationAssumeRole", "Type": "String", "Description": "(Optional) The ARN of the role that allows Automation to perform the actions on your behalf.", - "DefaultValue": "" + "DefaultValue": "", }, { "Name": "Finding", "Type": "StringMap", - "Description": "The input from Step function for ASG1 finding" - } - ], - "PlatformTypes": [ - "Windows", - "Linux", - "MacOS" + "Description": "The input from Step function for ASG1 finding", + }, ], + "PlatformTypes": ["Windows", "Linux", "MacOS"], "DocumentType": "Automation", "SchemaVersion": "0.3", "LatestVersion": "1", "DefaultVersion": "1", "DocumentFormat": "JSON", - "Tags": [] + "Tags": [], } - },{ - "Name": "ASR-RunWorkflow" - } + }, + {"Name": "ASR-RunWorkflow"}, ) ssmc_stub.activate() - 
mocker.patch('get_approval_requirement._get_ssm_client', return_value=ssm_c) + mocker.patch("get_approval_requirement._get_ssm_client", return_value=ssm_c) response = lambda_handler(step_input(), {}) print(response) - assert response['workflow_data'] == expected_result['workflow_data'] - assert response['workflowdoc'] == expected_result['workflowdoc'] - assert response['workflowaccount'] == expected_result['workflowaccount'] - assert response['workflowrole'] == expected_result['workflowrole'] + assert response["workflow_data"] == expected_result["workflow_data"] + assert response["workflowdoc"] == expected_result["workflowdoc"] + assert response["workflowaccount"] == expected_result["workflowaccount"] + assert response["workflowrole"] == expected_result["workflowrole"] + + ssmc_stub.deactivate() + + +def test_get_approval_config(mocker): + """ + Verifies that config runbooks defined are set as expected + """ + os.environ["WORKFLOW_RUNBOOK"] = "" + expected_result = { + "workflowdoc": "ASR-TestConfigDoc", + "workflowrole": "ASR-TestRole", + "workflow_data": { + "impact": "nondestructive", + "approvalrequired": "false", + "security_hub": "false", + }, + } + + AWS = AWSCachedClient(get_region()) + + ssm_c = AWS.get_connection("ssm") + ssmc_stub = Stubber(ssm_c) + + ssmc_stub.add_response( + "get_parameter", + { + "Parameter": { + "Name": "/Solutions/SO0111/ConfigRuleName", + "Type": "String", + "Value": '{"RunbookName":"ASR-TestConfigDoc","RunbookRole":"ASR-TestRole"}', + "Version": 1, + "LastModifiedDate": "2021-05-11T08:21:43.794000-04:00", + "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/ConfigRuleName", + "DataType": "text", + } + }, + {"Name": "/Solutions/SO0111/ConfigRuleName"}, + ) + + ssmc_stub.activate() + mocker.patch("boto3.client", return_value=ssm_c) + + response = lambda_handler(step_input_config(), {}) + + assert response["workflow_data"] == expected_result["workflow_data"] + assert response["workflowdoc"] == 
expected_result["workflowdoc"] + assert response["workflowrole"] == expected_result["workflowrole"] + + ssmc_stub.deactivate() + + +def test_get_approval_config_no_role(mocker): + """ + Verifies that config runbooks with no roles defined are set as expected + """ + os.environ["WORKFLOW_RUNBOOK"] = "" + expected_result = { + "workflowdoc": "ASR-TestConfigDoc", + "workflowrole": "", + "workflow_data": { + "impact": "nondestructive", + "approvalrequired": "false", + "security_hub": "false", + }, + } + + AWS = AWSCachedClient(get_region()) + + ssm_c = AWS.get_connection("ssm") + ssmc_stub = Stubber(ssm_c) + + ssmc_stub.add_response( + "get_parameter", + { + "Parameter": { + "Name": "/Solutions/SO0111/ConfigRuleName", + "Type": "String", + "Value": '{"RunbookName":"ASR-TestConfigDoc"}', + "Version": 1, + "LastModifiedDate": "2021-05-11T08:21:43.794000-04:00", + "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/ConfigRuleName", + "DataType": "text", + } + }, + {"Name": "/Solutions/SO0111/ConfigRuleName"}, + ) + + ssmc_stub.activate() + mocker.patch("boto3.client", return_value=ssm_c) + + response = lambda_handler(step_input_config(), {}) + + assert response["workflow_data"] == expected_result["workflow_data"] + assert response["workflowdoc"] == expected_result["workflowdoc"] + assert response["workflowrole"] == expected_result["workflowrole"] + + ssmc_stub.deactivate() + + +def test_get_approval_health(mocker): + """ + Verifies that health runbooks get run as expected + """ + os.environ["WORKFLOW_RUNBOOK"] = "" + expected_result = { + "workflowdoc": "ASR-TestConfigDoc", + "workflowrole": "ASR-TestRole", + "workflow_data": { + "impact": "nondestructive", + "approvalrequired": "false", + "security_hub": "false", + }, + } + + AWS = AWSCachedClient(get_region()) + step_input_health = step_input_config() + step_input_health["Finding"]["ProductFields"][ + "aws/securityhub/ProductName" + ] = "Health" + step_input_health["Finding"]["GeneratorId"] = 
"HealthRuleName" + + ssm_c = AWS.get_connection("ssm") + ssmc_stub = Stubber(ssm_c) + + ssmc_stub.add_response( + "get_parameter", + { + "Parameter": { + "Name": "/Solutions/SO0111/HealthRuleName", + "Type": "String", + "Value": '{"RunbookName":"ASR-TestConfigDoc","RunbookRole":"ASR-TestRole"}', + "Version": 1, + "LastModifiedDate": "2021-05-11T08:21:43.794000-04:00", + "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/HealthRuleName", + "DataType": "text", + } + }, + {"Name": "/Solutions/SO0111/HealthRuleName"}, + ) + + ssmc_stub.activate() + mocker.patch("boto3.client", return_value=ssm_c) + response = lambda_handler(step_input_health, {}) + + assert response["workflow_data"] == expected_result["workflow_data"] + assert response["workflowdoc"] == expected_result["workflowdoc"] + assert response["workflowrole"] == expected_result["workflowrole"] + + ssmc_stub.deactivate() + + +def test_get_approval_guardduty(mocker): + """ + Verifies that it returns the fanout runbook name + """ + os.environ["WORKFLOW_RUNBOOK"] = "" + expected_result = { + "workflowdoc": "ASR-TestConfigDoc", + "workflowrole": "ASR-TestRole", + "workflow_data": { + "impact": "nondestructive", + "approvalrequired": "false", + "security_hub": "false", + }, + } + + AWS = AWSCachedClient(get_region()) + step_input_guardduty = step_input_config() + step_input_guardduty["Finding"]["ProductFields"][ + "aws/securityhub/ProductName" + ] = "GuardDuty" + step_input_guardduty["Finding"]["Types"] = [ + "Effects/Data Exposure/Policy:S3-BucketBlockPublicAccessDisabled" + ] + + ssm_c = AWS.get_connection("ssm") + ssmc_stub = Stubber(ssm_c) + + ssmc_stub.add_response( + "get_parameter", + { + "Parameter": { + "Name": "/Solutions/SO0111/BucketBlockPublicAccessDisabled", + "Type": "String", + "Value": '{"RunbookName":"ASR-TestConfigDoc","RunbookRole":"ASR-TestRole"}', + "Version": 1, + "LastModifiedDate": "2021-05-11T08:21:43.794000-04:00", + "ARN": 
"arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/BucketBlockPublicAccessDisabled", + "DataType": "text", + } + }, + {"Name": "/Solutions/SO0111/BucketBlockPublicAccessDisabled"}, + ) + + ssmc_stub.activate() + mocker.patch("boto3.client", return_value=ssm_c) + + response = lambda_handler(step_input_guardduty, {}) + + assert response["workflow_data"] == expected_result["workflow_data"] + assert response["workflowdoc"] == expected_result["workflowdoc"] + assert response["workflowrole"] == expected_result["workflowrole"] + + ssmc_stub.deactivate() + + +def test_get_approval_inspector(mocker): + """ + Verifies that it returns the fanout runbook name + """ + os.environ["WORKFLOW_RUNBOOK"] = "" + expected_result = { + "workflowdoc": "ASR-TestConfigDoc", + "workflowrole": "ASR-TestRole", + "workflow_data": { + "impact": "nondestructive", + "approvalrequired": "false", + "security_hub": "false", + }, + } + + AWS = AWSCachedClient(get_region()) + step_input_inspector = step_input_config() + step_input_inspector["Finding"]["ProductFields"] = { + "aws/securityhub/ProductName": "Inspector", + "attributes/RULE_TYPE": "InspectorRuleName", + } + + ssm_c = AWS.get_connection("ssm") + ssmc_stub = Stubber(ssm_c) + + ssmc_stub.add_response( + "get_parameter", + { + "Parameter": { + "Name": "/Solutions/SO0111/InspectorRuleName", + "Type": "String", + "Value": '{"RunbookName":"ASR-TestConfigDoc","RunbookRole":"ASR-TestRole"}', + "Version": 1, + "LastModifiedDate": "2021-05-11T08:21:43.794000-04:00", + "ARN": "arn:aws:ssm:us-east-1:111111111111:parameter/Solutions/SO0111/InspectorRuleName", + "DataType": "text", + } + }, + {"Name": "/Solutions/SO0111/InspectorRuleName"}, + ) + + ssmc_stub.activate() + mocker.patch("boto3.client", return_value=ssm_c) + response = lambda_handler(step_input_inspector, {}) + + assert response["workflow_data"] == expected_result["workflow_data"] + assert response["workflowdoc"] == expected_result["workflowdoc"] + assert response["workflowrole"] 
== expected_result["workflowrole"] ssmc_stub.deactivate() diff --git a/source/Orchestrator/test/test_schedule_remediation.py b/source/Orchestrator/test/test_schedule_remediation.py new file mode 100644 index 00000000..9d624e2c --- /dev/null +++ b/source/Orchestrator/test/test_schedule_remediation.py @@ -0,0 +1,276 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Unit Test: schedule_remediation.py +Run from /deployment/temp/source/Orchestrator after running build-s3-dist.sh +""" +import json +import os +from datetime import datetime, timezone +from unittest.mock import patch + +import boto3 +from botocore.config import Config +from botocore.stub import Stubber +from moto import mock_aws +from schedule_remediation import lambda_handler + +os.environ["SchedulingTableName"] = "TestTable" +os.environ["RemediationWaitTime"] = "3" +timestampFormat = "%Y-%m-%dT%H:%M:%SZ" + +client = "boto3.client" + +BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + +event = { + "Records": [ + { + "messageId": "21dbdb16-cc07-404c-ae56-32b619200719", + "receiptHandle": "AQEB4Pirw/FVegbE1FRgLIUxac/EI2ihP+i/FVEp2bUE8PSbAKD3B3NZLXgJwIcjb73qa3OtT2mJ4jolFR7suHSQbSMdXR06axd66tpllyV+eAsSwlAihQuk56iZbBrW/nDFpES/Eb3K4AYssUtG3Uf++abdt2b4lPzImS4XW5GIuEQmYU4e22QhgDEhp0GC4Np5JigJeTFvWD9yuljgWOhlVAixmT7oy0GZwVrPchcgBN5pcoRwlRNHI6TmX8/FnEvv1UwC7KvhfywmWEGxx4TAGmO5aZu4ZKLXKIb0QiJXqRpwZV2yCRSayrV91DXqnyzTve7tUrX1Dp8yfA9AFxvNMMFgx3MxnRSKrusKzDjWI1BI6P/Y/99VXzTtPlYFfXqJgwiHEjMnIgSqdHvN8CYFMlZtsw0rbMRPnQzzIzhW6SQqAuDWfNrwy47Q8w8vcVri", + "body": 
'{"ResourceRegion":"us-east-1","executionId":"arn:aws:states:us-east-1:111111111:execution:SO0111-SHARR-Orchestrator:b24425f3-d43c-6ba7-c977-8fcbbc819025_24b1b193-c9cb-dfa0-2d3b-e1a8f4479dcc","AccountId":"111111111","RemediationDetails":{"Test":"Test"},"TaskToken":"AQCEAAAAKgAAAAMAAAAAAAAAAY9E4q9gMWM6RtheL2A6dR2HfHYkPARJ/WNksPpfUbAliiuAGWorrgnlcIfDhUbTHyVE6tqDdJoJ9Vn1dn+lW1TaOCrtKRSakPcFxgTXMv608Q==eTSQnxCMco7P/0vqpsQXNPo4Oi49f1WJrXvgG19gYNUl4x/hMRj80tbwPAgZ71wo7DImNgB+HYwROReYNJx8xcWlOe5O0EvJdCR9/KQvL/R0ESV83DSuKhtg3UU5uPicXz/FM/YZQbvEBRjCHAp+PdzNIqFuPQ09RweRQLhUVQjkVrQD87++xfK/z7lzGmubPZkyTQDdmLtpOJMBwGbFKfBxanbbC8r0pHYp05HI5dTbwuyNv+s/Kmu+EbVM4S5iBhqMGfbnoEk1sFnsU24ZY1NE/wbEsgkWBJZYdKxrp2S0DntD4fFD7CReZ7CXvAbfWoUYlTFnm9gV6oRZ/PaVPN/+/gKXF/wmOa0aYG8uLw1M63nRnfmfeEzOWuzxZk+VNQcXFLvITYLgLTLT63T4lCUmiJ7G/nuVKzhYfcI8D0wOc1/2fx5QFc4VCT0mELrLvyMnQMyd1mGjYVfIYFOA/ZPJwOgK94laxuNjhm6GeLIMsMEUQbnRcCY2537B2Q2+4Le0a1IMVy42uR2k4NE6"}', + "attributes": { + "ApproximateReceiveCount": "1", + "SentTimestamp": "1696523461711", + "SenderId": "", + "ApproximateFirstReceiveTimestamp": "1696523461713", + }, + "messageAttributes": {}, + "md5OfBody": "8834e454e4e0a22f7259a1cb0bcc66ce", + "eventSource": "aws:sqs", + "eventSourceARN": "arn:aws:sqs:us-east-1:111111111:asr-admin-stack1-SchedulingQueueB533E3CD-MGicwhGIVhFy", + "awsRegion": "us-east-1", + } + ] +} + +record = event["Records"][0] +body = json.loads(record["body"]) # type: ignore [arg-type] +remediation_details = body["RemediationDetails"] + +get_item_parameters = { + "TableName": "TestTable", + "Key": {"AccountID-Region": {"S": f"{body['AccountId']}-{body['ResourceRegion']}"}}, +} + +table_key = f"{body['AccountId']}-{body['ResourceRegion']}" +table_name = os.environ.get("SchedulingTableName") + + +def create_table(): + boto3.client("dynamodb").create_table( + AttributeDefinitions=[ + {"AttributeName": "AccountID-Region", "AttributeType": "S"} + ], + TableName=table_name, + KeySchema=[{"AttributeName": "AccountID-Region", 
"KeyType": "HASH"}], + BillingMode="PAY_PER_REQUEST", + ) + + +@mock_aws +def test_new_account_remediation(mocker): + dynamodb_client = boto3.client("dynamodb", config=BOTO_CONFIG) + sfn_client = boto3.client("stepfunctions", config=BOTO_CONFIG) + sfn_stub = Stubber(sfn_client) + clients = {"dynamodb": dynamodb_client, "stepfunctions": sfn_client} + create_table() + + current_timestamp = int(datetime.now(timezone.utc).timestamp()) + + current_timestamp_string = datetime.fromtimestamp( + current_timestamp, timezone.utc + ).strftime(timestampFormat) + + output = {"PlannedTimestamp": current_timestamp_string} + + output.update(remediation_details) + + sfn_stub.add_response( + "send_task_success", + {}, + {"taskToken": body["TaskToken"], "output": json.dumps(output)}, + ) + + sfn_stub.activate() + with patch(client, side_effect=lambda service, **_: clients[service]): + response = lambda_handler(event, {}) + final_item = dynamodb_client.get_item( + TableName=table_name, Key={"AccountID-Region": {"S": table_key}} + ) + assert final_item["Item"]["LastExecutedTimestamp"]["S"] == str( + current_timestamp + ) + assert ( + response + == f"Remediation scheduled to execute at {current_timestamp_string}" + ) + + sfn_stub.deactivate() + + +@mock_aws +def test_no_recent_remediation(mocker): + dynamodb_client = boto3.client("dynamodb", config=BOTO_CONFIG) + sfn_client = boto3.client("stepfunctions", config=BOTO_CONFIG) + sfn_stub = Stubber(sfn_client) + clients = {"dynamodb": dynamodb_client, "stepfunctions": sfn_client} + create_table() + current_timestamp = int(datetime.now(timezone.utc).timestamp()) + + found_timestamp = current_timestamp - 10 + + dynamodb_client.put_item( + TableName=table_name, + Item={ + "AccountID-Region": {"S": table_key}, + "LastExecutedTimestamp": {"S": str(found_timestamp)}, + }, + ) + + current_timestamp_string = datetime.fromtimestamp( + current_timestamp, timezone.utc + ).strftime(timestampFormat) + + output = {"PlannedTimestamp": 
current_timestamp_string} + + output.update(remediation_details) + + sfn_stub.add_response( + "send_task_success", + {}, + {"taskToken": body["TaskToken"], "output": json.dumps(output)}, + ) + + sfn_stub.activate() + + with patch(client, side_effect=lambda service, **_: clients[service]): + response = lambda_handler(event, {}) + final_item = dynamodb_client.get_item( + TableName=table_name, Key={"AccountID-Region": {"S": table_key}} + ) + assert final_item["Item"]["LastExecutedTimestamp"]["S"] == str( + current_timestamp + ) + assert ( + response + == f"Remediation scheduled to execute at {current_timestamp_string}" + ) + + sfn_stub.deactivate() + + +@mock_aws +def test_recent_remediation(mocker): + dynamodb_client = boto3.client("dynamodb", config=BOTO_CONFIG) + sfn_client = boto3.client("stepfunctions", config=BOTO_CONFIG) + sfn_stub = Stubber(sfn_client) + clients = {"dynamodb": dynamodb_client, "stepfunctions": sfn_client} + current_timestamp = int(datetime.now(timezone.utc).timestamp()) + found_timestamp = current_timestamp + 100 + + create_table() + dynamodb_client.put_item( + TableName=table_name, + Item={ + "AccountID-Region": {"S": table_key}, + "LastExecutedTimestamp": {"S": str(found_timestamp)}, + }, + ) + + new_timestamp = found_timestamp + 3 + + planned_timestamp = datetime.fromtimestamp(new_timestamp, timezone.utc).strftime( + timestampFormat + ) + + output = {"PlannedTimestamp": planned_timestamp} + + output.update(remediation_details) + + sfn_stub.add_response( + "send_task_success", + {}, + {"taskToken": body["TaskToken"], "output": json.dumps(output)}, + ) + + sfn_stub.activate() + + with patch(client, side_effect=lambda service, **_: clients[service]): + response = lambda_handler(event, {}) + final_item = dynamodb_client.get_item( + TableName=table_name, Key={"AccountID-Region": {"S": table_key}} + ) + assert final_item["Item"]["LastExecutedTimestamp"]["S"] == str(new_timestamp) + assert response == f"Remediation scheduled to execute at 
{planned_timestamp}" + + sfn_stub.deactivate() + + +@mock_aws +def test_account_missing_last_executed(mocker): + dynamodb_client = boto3.client("dynamodb", config=BOTO_CONFIG) + sfn_client = boto3.client("stepfunctions", config=BOTO_CONFIG) + sfn_stub = Stubber(sfn_client) + clients = {"dynamodb": dynamodb_client, "stepfunctions": sfn_client} + create_table() + + dynamodb_client.put_item( + TableName=table_name, + Item={"AccountID-Region": {"S": table_key}}, + ) + + current_timestamp = int(datetime.now(timezone.utc).timestamp()) + + current_timestamp_string = datetime.fromtimestamp( + current_timestamp, timezone.utc + ).strftime(timestampFormat) + + output = {"PlannedTimestamp": current_timestamp_string} + + output.update(remediation_details) + + sfn_stub.add_response( + "send_task_success", + {}, + {"taskToken": body["TaskToken"], "output": json.dumps(output)}, + ) + + sfn_stub.activate() + with patch(client, side_effect=lambda service, **_: clients[service]): + response = lambda_handler(event, {}) + final_item = dynamodb_client.get_item( + TableName=table_name, Key={"AccountID-Region": {"S": table_key}} + ) + assert final_item["Item"]["LastExecutedTimestamp"]["S"] == str( + current_timestamp + ) + assert ( + response + == f"Remediation scheduled to execute at {current_timestamp_string}" + ) + + sfn_stub.deactivate() + + +def test_failure(mocker): + sfn_client = boto3.client("stepfunctions", config=BOTO_CONFIG) + sfn_stub = Stubber(sfn_client) + clients = {"stepfunctions": sfn_client} + os.environ["RemediationWaitTime"] = "NOT A NUMBER" + + sfn_stub.add_response( + "send_task_failure", + {}, + { + "cause": "invalid literal for int() with base 10: 'NOT A NUMBER'", + "error": "ValueError", + "taskToken": body["TaskToken"], + }, + ) + + sfn_stub.activate() + with patch(client, side_effect=lambda service, **_: clients[service]): + lambda_handler(event, {}) + + sfn_stub.deactivate() diff --git a/source/Orchestrator/test/test_send_notifications.py 
b/source/Orchestrator/test/test_send_notifications.py index 1de79976..28b1779d 100644 --- a/source/Orchestrator/test/test_send_notifications.py +++ b/source/Orchestrator/test/test_send_notifications.py @@ -4,53 +4,47 @@ Unit Test: exec_ssm_doc.py Run from /deployment/temp/source/Orchestrator after running build-s3-dist.sh """ - -import os from send_notifications import lambda_handler, set_message_prefix_and_suffix -from pytest_mock import mocker event = { - 'Notification': { - 'State': 'SUCCESS', - 'Message': 'A Door is Ajar' - }, - 'SecurityStandard': 'AFSBP', - 'ControlId': 'foobar.1' + "Notification": {"State": "SUCCESS", "Message": "A Door is Ajar"}, + "SecurityStandard": "AFSBP", + "ControlId": "foobar.1", } + + def test_resolved(mocker): event = { - 'Notification': { - 'State': 'SUCCESS', - 'Message': 'A Door is Ajar' - }, - 'SecurityStandard': 'AFSBP', - 'ControlId': 'foobar.1' + "Notification": {"State": "SUCCESS", "Message": "A Door is Ajar"}, + "SecurityStandard": "AFSBP", + "ControlId": "foobar.1", } - mocker.patch('send_notifications.sechub_findings.SHARRNotification.notify', return_value=None) - assert lambda_handler(event, {}) == None + mocker.patch( + "send_notifications.sechub_findings.SHARRNotification.notify", return_value=None + ) + mocker.patch("send_notifications.CloudWatchMetrics.send_metric", return_value=None) + assert lambda_handler(event, {}) is None + def test_wrong_standard(mocker): event = { - 'Notification': { - 'State': 'WRONGSTANDARD', - 'Message': 'A Door is Ajar' - }, - 'SecurityStandard': 'AFSBP', - 'ControlId': 'foobar.1' + "Notification": {"State": "WRONGSTANDARD", "Message": "A Door is Ajar"}, + "SecurityStandard": "AFSBP", + "ControlId": "foobar.1", } - mocker.patch('send_notifications.sechub_findings.SHARRNotification.notify', return_value=None) - assert lambda_handler(event, {}) == None + mocker.patch( + "send_notifications.sechub_findings.SHARRNotification.notify", return_value=None + ) + 
mocker.patch("send_notifications.CloudWatchMetrics.send_metric", return_value=None) + assert lambda_handler(event, {}) is None + def test_message_prefix_and_suffix(): event = { - 'Notification': { - 'ExecId': 'Test Prefix', - 'AffectedObject': 'Test Suffix' - }, - 'SecurityStandard': 'AFSBP', - 'ControlId': 'foobar.1' + "Notification": {"ExecId": "Test Prefix", "AffectedObject": "Test Suffix"}, + "SecurityStandard": "AFSBP", + "ControlId": "foobar.1", } messagePrefix, messageSuffix = set_message_prefix_and_suffix(event) assert messagePrefix == "Test Prefix: " assert messageSuffix == " (Test Suffix)" - diff --git a/source/jest.config.ts b/source/jest.config.ts index 23b6d6be..7fec58c8 100644 --- a/source/jest.config.ts +++ b/source/jest.config.ts @@ -11,6 +11,7 @@ const config: Config = { '/playbooks/NEWPLAYBOOK/test', '/playbooks/PCI321/test', '/playbooks/SC/test', + '/playbooks/NIST80053/test', '/remediation_runbooks', '/solution_deploy', '/test', diff --git a/source/LambdaLayers/.coveragerc b/source/layer/.coveragerc similarity index 100% rename from source/LambdaLayers/.coveragerc rename to source/layer/.coveragerc diff --git a/source/layer/__init__.py b/source/layer/__init__.py new file mode 100644 index 00000000..04f8b7b7 --- /dev/null +++ b/source/layer/__init__.py @@ -0,0 +1,2 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 diff --git a/source/LambdaLayers/applogger.py b/source/layer/applogger.py similarity index 70% rename from source/LambdaLayers/applogger.py rename to source/layer/applogger.py index d41fc058..9ec845ca 100644 --- a/source/LambdaLayers/applogger.py +++ b/source/layer/applogger.py @@ -10,29 +10,30 @@ import os import time from datetime import date -import boto3 + from botocore.exceptions import ClientError -from botocore.config import Config -from utils import partition_from_region -import awsapi_cached_client +from layer import awsapi_cached_client -LOG_MAX_BATCH_SIZE = 1048576 # Controls when the buffer is flushed to the stream +LOG_MAX_BATCH_SIZE = 1048576 # Controls when the buffer is flushed to the stream LOG_ENTRY_ADDITIONAL = 26 + def get_logs_connection(apiclient): # returns a client id for ssm in the region of the finding via apiclient - return apiclient.get_connection('logs') + return apiclient.get_connection("logs") + class FailedToCreateLogGroup(Exception): pass -class LogHandler(object): +class LogHandler(object): def __init__(self, stream_name): - - self.apiclient = awsapi_cached_client.AWSCachedClient(os.getenv('AWS_DEFAULT_REGION', 'us-east-1')) + self.apiclient = awsapi_cached_client.AWSCachedClient( + os.getenv("AWS_DEFAULT_REGION", "us-east-1") + ) self.stream_name = stream_name.upper() - self.log_group = os.getenv('SOLUTION_LOGGROUP', 'SO0111-SHARR') + self.log_group = os.getenv("SOLUTION_LOGGROUP", "SO0111-SHARR") self._stream_token = None self._buffer = [] self._buffer_size = 0 @@ -46,7 +47,7 @@ def _create_log_group(self): try: get_logs_connection(self.apiclient).create_log_group( logGroupName=self.log_group - ) + ) except Exception as e: # if the stream was created in between the call ignore the error if type(e).__name__ != "ResourceAlreadyExistsException": @@ -56,18 +57,22 @@ def _create_log_group(self): def _create_log_stream(self, log_stream): """Create a new log stream""" # append today's date to 
stream name - log_stream = log_stream + '-' + str(date.today()) + log_stream = log_stream + "-" + str(date.today()) try: print(("Creating log stream {}".format(log_stream))) - get_logs_connection(self.apiclient).create_log_stream(logGroupName=self.log_group, logStreamName=log_stream) + get_logs_connection(self.apiclient).create_log_stream( + logGroupName=self.log_group, logStreamName=log_stream + ) self._stream_token = "0" except Exception as e: # if the stream was created in between the call ignore the error if type(e).__name__ == "ResourceAlreadyExistsException": - print('Log Stream already exists') + print("Log Stream already exists") elif type(e).__name__ == "ResourceNotFoundException": if self._create_log_group(): - get_logs_connection(self.apiclient).create_log_stream(logGroupName=self.log_group, logStreamName=log_stream) + get_logs_connection(self.apiclient).create_log_stream( + logGroupName=self.log_group, logStreamName=log_stream + ) else: raise FailedToCreateLogGroup else: @@ -78,14 +83,19 @@ def add_message(self, message): """Write a message to the buffer""" # Empty messages cause flush throw an exception if not message: - message = ' ' + message = " " timestamp = int(time.time() * 1000) - if self._buffer_size + (len(message) + LOG_ENTRY_ADDITIONAL) > LOG_MAX_BATCH_SIZE: + if ( + self._buffer_size + (len(message) + LOG_ENTRY_ADDITIONAL) + > LOG_MAX_BATCH_SIZE + ): self.flush() # put the timestamped message in the buffer self._buffer.append((timestamp, message)) - self._buffer_size += (len(message) + LOG_ENTRY_ADDITIONAL) # calculate new buffer size + self._buffer_size += ( + len(message) + LOG_ENTRY_ADDITIONAL + ) # calculate new buffer size def flush(self): """Write the buffer to the CW Logs stream""" @@ -101,7 +111,7 @@ def flush(self): put_event_args = { "logGroupName": self.log_group, "logStreamName": log_stream, - "logEvents": [{"timestamp": r[0], "message": r[1]} for r in self._buffer] + "logEvents": [{"timestamp": r[0], "message": r[1]} for r in 
self._buffer], } # Send to CW Logs with retry if token has changed @@ -110,20 +120,32 @@ def flush(self): # add sequence token to API call parms if present if self._stream_token: put_event_args["sequenceToken"] = self._stream_token - resp = get_logs_connection(self.apiclient).put_log_events(**put_event_args) + resp = get_logs_connection(self.apiclient).put_log_events( + **put_event_args + ) self._stream_token = resp.get("nextSequenceToken", None) break except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] # stream did exist but need new token, get it from exception data - if exception_type in ["InvalidSequenceTokenException", "DataAlreadyAcceptedException"]: + if exception_type in [ + "InvalidSequenceTokenException", + "DataAlreadyAcceptedException", + ]: # update the token and retry try: - self._stream_token = ex.response['Error']['Message'].split(":")[-1].strip() + self._stream_token = ( + ex.response["Error"]["Message"].split(":")[-1].strip() + ) print("Token changed. Will be retried.") - print(("Token for existing stream {} is {}".format( - self.stream_name, self._stream_token))) - except: + print( + ( + "Token for existing stream {} is {}".format( + self.stream_name, self._stream_token + ) + ) + ) + except Exception: self._stream_token = None raise else: diff --git a/source/layer/awsapi_cached_client.py b/source/layer/awsapi_cached_client.py new file mode 100644 index 00000000..f5e9639e --- /dev/null +++ b/source/layer/awsapi_cached_client.py @@ -0,0 +1,160 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import os +from typing import TYPE_CHECKING, Any, Final, Optional + +import boto3 +from boto3 import Session +from botocore.config import Config + +if TYPE_CHECKING: + from mypy_boto3_sts.client import STSClient +else: + STSClient = object + + +class AWSCachedClient: + """ + Maintains a hash of AWS API Client connections by region and service + """ + + account: Optional[str] = "" + region: Optional[str] = "" + client: dict[str, Any] = {} + solution_id = "" + solution_version = "undefined" + + def __init__(self, region: Optional[str]) -> None: + """ + Create a Boto3 Client object. Region is used for operations such + as retrieving account number, and as the default for get_connection. + """ + self.solution_id = os.getenv("SOLUTION_ID", "SO0111") + self.solution_version = os.getenv("SOLUTION_VERSION", "undefined") + self.region = region + self.boto_config = Config( + user_agent_extra=f"AwsSolution/{self.solution_id}/{self.solution_version}", + retries={"max_attempts": 10, "mode": "standard"}, + ) + + self.account = self._get_local_account_id() + + def get_connection(self, service: str, region: Optional[str] = None) -> Any: + """Connect to AWS api""" + + if not region: + region = self.region + + if service not in self.client: + self.client[service] = {} + + if region not in self.client[service]: + self.client[service][region] = boto3.client( + service, region_name=region, config=self.boto_config + ) + + return self.client[service][region] + + def _get_local_account_id(self) -> Optional[str]: + """ + get local account info + """ + sts: STSClient = self.get_connection("sts", self.region) + aws_account_id = sts.get_caller_identity().get("Account") + return aws_account_id + + +class MissingAssumedRole(Exception): + pass + + +class BotoSession: + client_props: dict[str, Any] = {} + resource_props: dict[str, Any] = {} + STS: Optional[STSClient] = None + partition: Optional[str] = None + session: Optional[boto3.session.Session] = None 
+ target: Optional[str] = None + role: Optional[str] = None + + def create_session(self) -> None: + self.STS = None + self.STS = self._create_sts_client() + + if not self.target: + self.target = self.STS.get_caller_identity()["Account"] + remote_account = self.STS.assume_role( + RoleArn="arn:" # type: ignore[operator] + + self.partition + + ":iam::" + + self.target + + ":role/" + + self.role, + RoleSessionName="sechub_admin", + ) + self.session = boto3.session.Session( + aws_access_key_id=remote_account["Credentials"]["AccessKeyId"], + aws_secret_access_key=remote_account["Credentials"]["SecretAccessKey"], + aws_session_token=remote_account["Credentials"]["SessionToken"], + ) + + boto3.setup_default_session() + + def _create_sts_client(self) -> Any: + """ + Create the sts client + """ + + session: Final = Session() + sts_regional_endpoint: Final = str.format( + "https://sts.{}.amazonaws.com", session.region_name + ) + # STS client __must__ use a regional endpoint so that tokens are version 2. 
+ # version 1 tokens are not valid in opt-in regions unless enabled on an + # account level + return session.client( + "sts", + region_name=session.region_name, + endpoint_url=sts_regional_endpoint, + config=self.boto_config, + ) + + def __init__( + self, + account: Optional[str] = None, + role: Optional[str] = None, + partition: Optional[str] = None, + ) -> None: + """ + Create a session + account: None or the target account + """ + # Default partition to 'aws' + if not partition: + partition = "aws" + self.target = account + if not role: + raise MissingAssumedRole + else: + self.role = role + self.session = None + self.partition = os.getenv("AWS_PARTITION", partition) + self.solution_id = os.getenv("SOLUTION_ID", "SO0111") + self.solution_version = os.getenv("SOLUTION_VERSION", "undefined") + self.boto_config = Config( + user_agent_extra=f"AwsSolution/{self.solution_id}/{self.solution_version}", + retries={"max_attempts": 10, "mode": "standard"}, + ) + self.create_session() + + def client(self, name: str, **kwargs: Any) -> Any: + self.client_props[name] = self.session.client( # type: ignore[union-attr] + name, config=self.boto_config, **kwargs + ) + return self.client_props[name] + + def resource(self, name: str, **kwargs: Any) -> Any: + self.resource_props[name] = self.session.resource( # type: ignore[union-attr] + name, config=self.boto_config, **kwargs + ) + return self.resource_props[name] diff --git a/source/layer/cloudwatch_metrics.py b/source/layer/cloudwatch_metrics.py new file mode 100644 index 00000000..924660b4 --- /dev/null +++ b/source/layer/cloudwatch_metrics.py @@ -0,0 +1,100 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import os +from typing import TYPE_CHECKING, Any, cast + +import boto3 +from layer.logger import Logger + +if TYPE_CHECKING: + from mypy_boto3_cloudwatch import CloudWatchClient +else: + CloudWatchClient = object + +from layer import awsapi_cached_client + +if TYPE_CHECKING: + from mypy_boto3_ssm.client import SSMClient +else: + SSMClient = object + +# initialise loggers +LOG_LEVEL = os.getenv("log_level", "info") +LOGGER = Logger(loglevel=LOG_LEVEL) + + +class CloudWatchMetrics: + namespace = "ASR" + + def __init__(self): + try: + self.session = boto3.session.Session() + self.region = self.session.region_name + self.ssm_client = self.init_ssm_client() + self.metrics_enabled = self.send_cloudwatch_metrics_enabled() + if not self.metrics_enabled: + return + + self.cloudwatch_client = self.init_cloudwatch_client() + + except Exception as e: + print(e) + LOGGER.error("Could not initialize metrics") + raise + + def send_cloudwatch_metrics_enabled(self): + is_enabled = False # default value + try: + ssm_parm = "/Solutions/SO0111/sendCloudwatchMetrics" + send_cloudwatch_metrics_from_ssm = ( + self.ssm_client.get_parameter(Name=ssm_parm) # type: ignore[union-attr] + .get("Parameter") + .get("Value") + ) + + if ( + send_cloudwatch_metrics_from_ssm is None + or send_cloudwatch_metrics_from_ssm.lower() not in ["yes", "no"] + ): + print( + f'Unexpected value for {ssm_parm}: {send_cloudwatch_metrics_from_ssm}. 
Defaulting to "no"' + ) + elif send_cloudwatch_metrics_from_ssm.lower() == "yes": + is_enabled = True + + except Exception as e: + print(e) + + return is_enabled + + def init_ssm_client(self) -> SSMClient: + try: + new_ssm_client = awsapi_cached_client.AWSCachedClient( + self.region + ).get_connection("ssm") + return cast(SSMClient, new_ssm_client) + + except Exception as e: + print(f"Could not connect to ssm: {str(e)}") + raise e + + def init_cloudwatch_client(self) -> CloudWatchClient: + try: + new_cloudwatch_client = awsapi_cached_client.AWSCachedClient( + self.region + ).get_connection("cloudwatch") + return cast(CloudWatchClient, new_cloudwatch_client) + except Exception as e: + print(f"Could not connect to cloudwatch: {str(e)}") + raise e + + def send_metric(self, metric: Any) -> None: + try: + if metric is None or not self.metrics_enabled or not self.cloudwatch_client: + return + self.cloudwatch_client.put_metric_data( + MetricData=[metric], + Namespace=self.namespace, + ) + except Exception as exception: + print(f"Could not send cloudwatch metric: {str(exception)}") diff --git a/source/LambdaLayers/logger.py b/source/layer/logger.py similarity index 90% rename from source/LambdaLayers/logger.py rename to source/layer/logger.py index 2bfc43c4..ba7aca71 100644 --- a/source/LambdaLayers/logger.py +++ b/source/layer/logger.py @@ -2,7 +2,8 @@ # SPDX-License-Identifier: Apache-2.0 import json import logging -from datetime import datetime, date +from datetime import date, datetime + class DateTimeEncoder(json.JSONEncoder): def default(self, o): @@ -11,18 +12,18 @@ def default(self, o): return serial raise TypeError("Type %s not serializable" % type(o)) -class Logger(object): - def __init__(self, loglevel='warning'): +class Logger(object): + def __init__(self, loglevel="warning"): """Initializes logging""" self.config(loglevel=loglevel) - def config(self, loglevel='warning'): + def config(self, loglevel="warning"): loglevel = logging.getLevelName(loglevel.upper()) 
mainlogger = logging.getLogger() mainlogger.setLevel(loglevel) - logfmt = '%(levelname)s %(message)s\n' + logfmt = "%(levelname)s %(message)s\n" if len(mainlogger.handlers) == 0: mainlogger.addHandler(logging.StreamHandler()) mainlogger.handlers[0].setFormatter(logging.Formatter(logfmt)) @@ -48,7 +49,6 @@ def debug(self, message, **kwargs): self.log.debug(self._format(message), **kwargs) def info(self, message, **kwargs): - ## type: (object, object) -> object """wrapper for logging.info call""" self.log.info(self._format(message), **kwargs) diff --git a/source/layer/metrics.py b/source/layer/metrics.py new file mode 100644 index 00000000..ca9d4ee7 --- /dev/null +++ b/source/layer/metrics.py @@ -0,0 +1,168 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import uuid +from datetime import datetime +from typing import TYPE_CHECKING, Any, Optional +from urllib.request import Request, urlopen + +import boto3 +from botocore.exceptions import ClientError +from layer import awsapi_cached_client + +if TYPE_CHECKING: + from mypy_boto3_ssm.client import SSMClient +else: + SSMClient = object + + +class Metrics(object): + event_type = "" + send_metrics_option = "No" + solution_version: Any = "" + solution_uuid = None + session = None + region = None + ssm_client: Optional[SSMClient] = None + metrics_parameter_name = "/Solutions/SO0111/anonymous_metrics_uuid" + + def __init__(self, event_type=""): + self.session = boto3.session.Session() + self.region = self.session.region_name + + self.ssm_client = self.connect_to_ssm() + + if not self.send_anonymous_metrics_enabled(): + return + + self.event_type = event_type + + self.__get_solution_uuid() + + try: + solution_version_parm = "/Solutions/SO0111/version" + solution_version_from_ssm = ( + self.ssm_client.get_parameter(Name=solution_version_parm) # type: ignore[union-attr] + .get("Parameter") + .get("Value") + ) + except ClientError as ex: + exception_type 
= ex.response["Error"]["Code"] + if exception_type == "ParameterNotFound": + solution_version_from_ssm = "unknown" + else: + print(ex) + except Exception as e: + print(e) + raise + + self.solution_version = solution_version_from_ssm + + def send_anonymous_metrics_enabled(self): + is_enabled = False # default value + try: + ssm_parm = "/Solutions/SO0111/sendAnonymizedMetrics" + send_anonymous_metrics_from_ssm = ( + self.ssm_client.get_parameter(Name=ssm_parm) # type: ignore[union-attr] + .get("Parameter") + .get("Value") + .lower() + ) + + if ( + send_anonymous_metrics_from_ssm != "yes" + and send_anonymous_metrics_from_ssm != "no" + ): + print( + f'Unexpected value for {ssm_parm}: {send_anonymous_metrics_from_ssm}. Defaulting to "no"' + ) + elif send_anonymous_metrics_from_ssm == "yes": + is_enabled = True + + except Exception as e: + print(e) + + return is_enabled + + def connect_to_ssm(self): + try: + if not self.ssm_client: + new_ssm_client = awsapi_cached_client.AWSCachedClient( + self.region + ).get_connection("ssm") + return new_ssm_client + except Exception as e: + print(f"Could not connect to ssm: {str(e)}") + + def __update_solution_uuid(self, new_uuid): + self.ssm_client.put_parameter( # type: ignore[union-attr] + Name=self.metrics_parameter_name, + Description="Unique Id for anonymous metrics collection", + Value=new_uuid, + Type="String", + ) + + def __get_solution_uuid(self): + try: + solution_uuid_from_ssm = ( + self.ssm_client.get_parameter(Name=self.metrics_parameter_name) # type: ignore[union-attr] + .get("Parameter") + .get("Value") + ) + self.solution_uuid = solution_uuid_from_ssm + except ClientError as ex: + exception_type = ex.response["Error"]["Code"] + if exception_type == "ParameterNotFound": + self.solution_uuid = str(uuid.uuid4()) + self.__update_solution_uuid(self.solution_uuid) + else: + print(ex) + raise + except Exception as e: + print(e) + raise + + def get_metrics_from_finding(self, finding): + try: + if finding is not None: + 
metrics_data = { + "generator_id": finding.get("GeneratorId"), + "type": finding.get("Title"), + "productArn": finding.get("ProductArn"), + "finding_triggered_by": self.event_type, + "region": self.region, + } + else: + metrics_data = {} + return metrics_data + except Exception as excep: + print(excep) + return {} + + def send_metrics(self, metrics_data): + try: + if metrics_data is not None and self.send_anonymous_metrics_enabled(): + usage_data = { + "Solution": "SO0111", + "UUID": self.solution_uuid, + "TimeStamp": str(datetime.utcnow().isoformat()), + "Data": metrics_data, + "Version": self.solution_version, + } + print(f"Sending metrics data {json.dumps(usage_data)}") + self.post_metrics_to_api(usage_data) + + else: + return + except Exception as excep: + print(excep) + + def post_metrics_to_api(self, request_data): + url = "https://metrics.awssolutionsbuilder.com/generic" + req = Request( + url, + method="POST", + data=bytes(json.dumps(request_data), encoding="utf8"), + headers={"Content-Type": "application/json"}, + ) + urlopen(req) # nosec diff --git a/source/layer/py.typed b/source/layer/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/source/LambdaLayers/sechub_findings.py b/source/layer/sechub_findings.py similarity index 51% rename from source/LambdaLayers/sechub_findings.py rename to source/layer/sechub_findings.py index 629753dc..f8ae0c4f 100644 --- a/source/LambdaLayers/sechub_findings.py +++ b/source/layer/sechub_findings.py @@ -1,99 +1,115 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import re -import json import inspect +import json import os -import boto3 -from utils import publish_to_sns -from awsapi_cached_client import AWSCachedClient +from typing import Any, Union + from botocore.exceptions import ClientError +from layer.awsapi_cached_client import AWSCachedClient +from layer.utils import publish_to_sns # Get AWS region from Lambda environment. 
If not present then we're not # running under lambda, so defaulting to us-east-1 securityhub = None + + def get_securityhub(): global securityhub - if securityhub == None: - securityhub = AWSCachedClient(os.getenv('AWS_DEFAULT_REGION', 'us-east-1')).get_connection('securityhub') + if securityhub is None: + securityhub = AWSCachedClient( + os.getenv("AWS_DEFAULT_REGION", "us-east-1") + ).get_connection("securityhub") return securityhub -UNHANDLED_CLIENT_ERROR = 'An unhandled client error occurred: ' + + +UNHANDLED_CLIENT_ERROR = "An unhandled client error occurred: " # Local functions + def get_ssm_connection(apiclient): # returns a client id for ssm in the region of the finding via apiclient - return apiclient.get_connection('ssm') + return apiclient.get_connection("ssm") + # Classes + class InvalidFindingJson(Exception): pass + class Finding(object): """ Security Hub Finding class """ - details = {} # Assuming ONE finding per event. We'll take the first. - generator_id = 'error' - account_id = 'error' - resource_region = 'error' - standard_name = '' - standard_shortname = 'error' - standard_version = 'error' - standard_control = 'error' - remediation_control = '' - standard_version_supported = 'False' - title = '' - description = '' + + details: Any = {} # Assuming ONE finding per event. We'll take the first. 
+ generator_id = "error" + account_id = "error" + resource_region = "error" + standard_name = "" + standard_shortname = "error" + standard_version = "error" + standard_control = "error" + remediation_control = "" + standard_version_supported = "False" + title = "" + description = "" region = None - arn = '' - uuid = '' + arn = "" + uuid = "" def __init__(self, finding_rec): - self.region = os.getenv('AWS_DEFAULT_REGION', 'us-east-1') + self.region = os.getenv("AWS_DEFAULT_REGION", "us-east-1") self.aws_api_client = AWSCachedClient(self.region) self.details = finding_rec - self.arn = self.details.get('Id', 'error') - self.uuid = self.arn.split ('/finding/')[1] - self.generator_id = self.details.get('GeneratorId', 'error') - self.account_id = self.details.get('AwsAccountId', 'error') - resource = self.details.get('Resources',[])[0] - self.resource_region = resource.get('Region','error') + self.arn = self.details.get("Id", "error") + self.uuid = self.arn.split("/finding/")[1] + self.generator_id = self.details.get("GeneratorId", "error") + self.account_id = self.details.get("AwsAccountId", "error") + resource = self.details.get("Resources", [])[0] + self.resource_region = resource.get("Region", "error") if not self.is_valid_finding_json(): raise InvalidFindingJson - self.title = self.details.get('Title', 'error') - self.description = self.details.get('Description', 'error') - self.remediation_url = self.details.get('Remediation', {}).get('Recommendation', {}).get('Url', '') + self.title = self.details.get("Title", "error") + self.description = self.details.get("Description", "error") + self.remediation_url = ( + self.details.get("Remediation", {}).get("Recommendation", {}).get("Url", "") + ) - if self.details.get('ProductFields').get('StandardsControlArn', None) is not None: + if ( + self.details.get("ProductFields").get("StandardsControlArn", None) + is not None + ): self._get_security_standard_fields_from_arn( - 
self.details.get('ProductFields').get('StandardsControlArn') + self.details.get("ProductFields").get("StandardsControlArn") ) else: - self.standard_control = self.details.get('Compliance').get('SecurityControlId') - self.standard_version = '2.0.0' - self.standard_name = 'security-control' - - + self.standard_control = self.details.get("Compliance").get( + "SecurityControlId" + ) + self.standard_version = "2.0.0" + self.standard_name = "security-control" self._get_security_standard_abbreviation_from_ssm() self._get_control_remap() self._set_standard_version_supported() def is_valid_finding_json(self): - if self.generator_id == 'error': + if self.generator_id == "error": return False # Verify finding['Id'] - if not self.details.get('Id'): + if not self.details.get("Id"): return False # Account Id - if self.account_id == 'error': + if self.account_id == "error": return False return True @@ -102,7 +118,7 @@ def resolve(self, message): """ Update the finding_id workflow status to "RESOLVED" """ - self.update_text(message, status='RESOLVED') + self.update_text(message, status="RESOLVED") def flag(self, message): """ @@ -111,7 +127,7 @@ def flag(self, message): so multiple remediations are not initiated when automatic triggers are in use. 
""" - self.update_text(message, status='NOTIFIED') + self.update_text(message, status="NOTIFIED") def update_text(self, message, status=None): """ @@ -120,21 +136,18 @@ def update_text(self, message, status=None): workflow_status = {} if status: - workflow_status = { 'Workflow': { 'Status': status } } + workflow_status = {"Workflow": {"Status": status}} try: get_securityhub().batch_update_findings( FindingIdentifiers=[ { - 'Id': self.details.get('Id'), - 'ProductArn': self.details.get('ProductArn') + "Id": self.details.get("Id"), + "ProductArn": self.details.get("ProductArn"), } ], - Note={ - 'Text': message, - 'UpdatedBy': inspect.stack()[0][3] - }, - **workflow_status + Note={"Text": message, "UpdatedBy": inspect.stack()[0][3]}, + **workflow_status, ) except Exception as e: @@ -142,22 +155,26 @@ def update_text(self, message, status=None): raise def _get_security_standard_fields_from_arn(self, arn): - standards_arn_parts = arn.split(':')[5].split('/') + standards_arn_parts = arn.split(":")[5].split("/") self.standard_name = standards_arn_parts[1] self.standard_version = standards_arn_parts[3] self.standard_control = standards_arn_parts[4] def _get_control_remap(self): - self.remediation_control = self.standard_control # Defaults to self + self.remediation_control = self.standard_control # Defaults to self try: local_ssm = get_ssm_connection(self.aws_api_client) - remap = local_ssm.get_parameter( - Name=f'/Solutions/SO0111/{self.standard_shortname}/{self.standard_version}/{self.standard_control}/remap' - ).get('Parameter').get('Value') + remap = ( + local_ssm.get_parameter( + Name=f"/Solutions/SO0111/{self.standard_shortname}/{self.standard_version}/{self.standard_control}/remap" + ) + .get("Parameter") + .get("Value") + ) self.remediation_control = remap except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] if exception_type in "ParameterNotFound": return else: @@ -169,18 +186,21 @@ def 
_get_control_remap(self): return def _get_security_standard_abbreviation_from_ssm(self): - try: local_ssm = get_ssm_connection(self.aws_api_client) - abbreviation = local_ssm.get_parameter( - Name=f'/Solutions/SO0111/{self.standard_name}/{self.standard_version}/shortname' - ).get('Parameter').get('Value') + abbreviation = ( + local_ssm.get_parameter( + Name=f"/Solutions/SO0111/{self.standard_name}/{self.standard_version}/shortname" + ) + .get("Parameter") + .get("Value") + ) self.standard_shortname = abbreviation except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] if exception_type in "ParameterNotFound": - self.security_standard = 'notfound' + self.security_standard = "notfound" else: print(UNHANDLED_CLIENT_ERROR + exception_type) return @@ -193,51 +213,57 @@ def _set_standard_version_supported(self): try: local_ssm = get_ssm_connection(self.aws_api_client) - version_status = local_ssm.get_parameter( - Name=f'/Solutions/SO0111/{self.standard_name}/{self.standard_version}/status' - ).get('Parameter').get('Value') + version_status = ( + local_ssm.get_parameter( + Name=f"/Solutions/SO0111/{self.standard_name}/{self.standard_version}/status" + ) + .get("Parameter") + .get("Value") + ) - if version_status == 'enabled': - self.standard_version_supported = 'True' + if version_status == "enabled": + self.standard_version_supported = "True" else: - self.standard_version_supported = 'False' + self.standard_version_supported = "False" except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] if exception_type in "ParameterNotFound": - self.standard_version_supported = 'False' + self.standard_version_supported = "False" else: print(UNHANDLED_CLIENT_ERROR + exception_type) - self.standard_version_supported = 'False' + self.standard_version_supported = "False" except Exception as e: print(UNHANDLED_CLIENT_ERROR + str(e)) - 
self.standard_version_supported = 'False' + self.standard_version_supported = "False" + -#================ +# ================ # Utilities -#================ +# ================ class InvalidValue(Exception): pass + class SHARRNotification(object): # These are private - they cannot be changed after the object is created - __security_standard = '' + __security_standard = "" __controlid = None - __region = '' + __region = "" - severity = 'INFO' - message = '' - logdata = [] + severity = "INFO" + message = "" + logdata: Any = [] send_to_sns = False - finding_info = {} + finding_info: Union[dict[str, Any], str] = {} def __init__(self, security_standard, region, controlid=None): """ Initialize the class applogger_name determines the log stream name in CW Logs ex. SHARRNotification(, 'us-east-1', None) -> logs to -2021-01-22 - ex. SHARRNotification('AFSBP', 'us-east-1', 'EC2.1') -> logs to AFSBP-EC2.1-2021-01-22 + ex. SHARRNotification('FSBP', 'us-east-1', 'EC2.1') -> logs to FSBP-EC2.1-2021-01-22 """ self.__security_standard = security_standard self.__region = region @@ -249,11 +275,11 @@ def _get_log_handler(self): """ Create a loghandler object """ - from applogger import LogHandler + from layer.applogger import LogHandler applogger_name = self.__security_standard if self.__controlid: - applogger_name += '-' + self.__controlid + applogger_name += "-" + self.__controlid applogger = LogHandler(applogger_name) return applogger @@ -266,28 +292,20 @@ def notify(self): Send notifications to the application CW Logs stream and sns """ sns_notify_json = { - 'severity': self.severity, - 'message': self.message, - 'finding': self.finding_info + "severity": self.severity, + "message": self.message, + "finding": self.finding_info, } if self.send_to_sns: sent_id = publish_to_sns( - 'SO0111-SHARR_Topic', - json.dumps( - sns_notify_json, - indent=2, - default=str - ), - self.__region + "SO0111-SHARR_Topic", + json.dumps(sns_notify_json, indent=2, default=str), + self.__region, ) - 
print(f'Notification message ID {sent_id} sent.') - self.applogger.add_message( - self.severity + ': ' + self.message - ) + print(f"Notification message ID {sent_id} sent.") + self.applogger.add_message(self.severity + ": " + self.message) if self.logdata: for line in self.logdata: - self.applogger.add_message( - line - ) + self.applogger.add_message(line) self.applogger.flush() diff --git a/source/layer/test/__init__.py b/source/layer/test/__init__.py new file mode 100644 index 00000000..04f8b7b7 --- /dev/null +++ b/source/layer/test/__init__.py @@ -0,0 +1,2 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 diff --git a/source/LambdaLayers/test/conftest.py b/source/layer/test/conftest.py similarity index 80% rename from source/LambdaLayers/test/conftest.py rename to source/layer/test/conftest.py index 140e5897..3886f3f0 100644 --- a/source/LambdaLayers/test/conftest.py +++ b/source/layer/test/conftest.py @@ -1,9 +1,11 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import os -import pytest from unittest.mock import patch -from awsapi_cached_client import AWSCachedClient + +import pytest +from layer.awsapi_cached_client import AWSCachedClient + @pytest.fixture(scope="module", autouse=True) def aws_credentials(): @@ -15,9 +17,12 @@ def aws_credentials(): os.environ["SOLUTION_ID"] = "SOTestID" os.environ["AWS_ACCOUNT"] = "123456789012" + @pytest.fixture(scope="module", autouse=True) def mock_get_local_account_id(): - mock = patch.object(AWSCachedClient, '_get_local_account_id', return_value="111111111111") + mock = patch.object( + AWSCachedClient, "_get_local_account_id", return_value="111111111111" + ) mock.start() yield mock.stop() diff --git a/source/LambdaLayers/test/file_utilities.py b/source/layer/test/file_utilities.py similarity index 86% rename from source/LambdaLayers/test/file_utilities.py rename to source/layer/test/file_utilities.py index 2a74de77..1b23c1dc 100644 --- a/source/LambdaLayers/test/file_utilities.py +++ b/source/layer/test/file_utilities.py @@ -2,10 +2,11 @@ # SPDX-License-Identifier: Apache-2.0 import json + def load_test_data(file, region): testdata = open(file) rawdata = testdata.read() - rawdata = rawdata.replace('us-east-1', region) + rawdata = rawdata.replace("us-east-1", region) # Replace all occurences of us-east-1 with event = json.loads(rawdata) testdata.close diff --git a/source/layer/test/test_api_cached_client.py b/source/layer/test/test_api_cached_client.py new file mode 100644 index 00000000..48c84eb1 --- /dev/null +++ b/source/layer/test/test_api_cached_client.py @@ -0,0 +1,38 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +from unittest.mock import ANY, MagicMock, patch + +from layer.awsapi_cached_client import AWSCachedClient, BotoSession + + +def test_create_client(): + AWS = AWSCachedClient("us-east-1") + + AWS.get_connection("sns") # in us-east-1 + my_account = AWS.account + assert my_account + assert "sns" in AWS.client + assert "us-east-1" in AWS.client["sns"] + AWS.get_connection("ec2") + assert "ec2" in AWS.client + assert "us-east-1" in AWS.client["ec2"] + AWS.get_connection("iam", "ap-northeast-1") + assert "iam" in AWS.client + assert "ap-northeast-1" in AWS.client["iam"] + + +@patch("layer.awsapi_cached_client.Session") +def test_boto_session_uses_regional_sts_endpoint(mock_session: MagicMock) -> None: + mock_client = MagicMock() + mock_session.return_value.client = mock_client + region_name = "executing-region" + mock_session.return_value.region_name = region_name + + BotoSession(role="SO0111-SHARR-Orchestrator-Member") + + mock_client.assert_called_with( + "sts", + region_name=region_name, + endpoint_url=f"https://sts.{region_name}.amazonaws.com", + config=ANY, + ) diff --git a/source/LambdaLayers/test/test_applogger.py b/source/layer/test/test_applogger.py similarity index 92% rename from source/LambdaLayers/test/test_applogger.py rename to source/layer/test/test_applogger.py index a27a937d..05cc2b1d 100644 --- a/source/LambdaLayers/test/test_applogger.py +++ b/source/layer/test/test_applogger.py @@ -6,11 +6,10 @@ """ import os from datetime import date + import boto3 -from botocore.stub import Stubber, ANY -import pytest -from pytest_mock import mocker -from applogger import LogHandler +from botocore.stub import ANY, Stubber +from layer.applogger import LogHandler my_session = boto3.session.Session() my_region = my_session.region_name @@ -20,7 +19,6 @@ # # ------------------------------------------------------------------------------ def test_init_default(): - applogger = LogHandler("mystream") assert applogger.log_group == 
"SO0111-SHARR" @@ -29,7 +27,6 @@ def test_init_default(): # # ------------------------------------------------------------------------------ def test_create_logstream(mocker): - applogger = LogHandler("mystream") assert applogger.log_group == "SO0111-SHARR" @@ -66,7 +63,7 @@ def test_create_logstream(mocker): ) stubbed_logs_client.activate() - mocker.patch("applogger.get_logs_connection", return_value=logsclient) + mocker.patch("layer.applogger.get_logs_connection", return_value=logsclient) applogger.add_message("A door is ajar") assert len(applogger._buffer) == 1 @@ -78,7 +75,6 @@ def test_create_logstream(mocker): # # # #------------------------------------------------------------------------------ def test_init_custom(): - os.environ["SOLUTION_LOGGROUP"] = "MY-LOG-GROUP" applogger = LogHandler("mystream") assert applogger.log_group == "MY-LOG-GROUP" diff --git a/source/layer/test/test_cloudwatch_metrics.py b/source/layer/test/test_cloudwatch_metrics.py new file mode 100644 index 00000000..57e175ac --- /dev/null +++ b/source/layer/test/test_cloudwatch_metrics.py @@ -0,0 +1,135 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import os + +import boto3 +from botocore.stub import Stubber +from layer.cloudwatch_metrics import CloudWatchMetrics + +test_data = "test/test_json_data/" + + +def get_region(): + return os.getenv("AWS_DEFAULT_REGION") + + +mock_ssm_get_parameter_send_cloudwatch_metrics_yes = { + "Parameter": { + "Name": "/Solutions/SO0111/sendCloudwatchMetrics", + "Type": "String", + "Value": "Yes", + "Version": 1, + "LastModifiedDate": "2021-02-25T12:58:50.591000-05:00", + "ARN": f"arn:aws:ssm:{get_region()}:111111111111:parameter/Solutions/SO0111/sendAnonymizedMetrics", + "DataType": "text", + } +} + +mock_ssm_get_parameter_send_cloudwatch_no = { + "Parameter": { + "Name": "/Solutions/SO0111/sendCloudwatchMetrics", + "Type": "String", + "Value": "No", + "Version": 1, + "LastModifiedDate": "2021-02-25T12:58:50.591000-05:00", + "ARN": f"arn:aws:ssm:{get_region()}:111111111111:parameter/Solutions/SO0111/sendAnonymizedMetrics", + "DataType": "text", + } +} + +mock_ssm_get_parameter_send_cloudwatch_bad_value = { + "Parameter": { + "Name": "/Solutions/SO0111/sendCloudwatchMetrics", + "Type": "String", + "Value": "slartibartfast", + "Version": 1, + "LastModifiedDate": "2021-02-25T12:58:50.591000-05:00", + "ARN": f"arn:aws:ssm:{get_region()}:111111111111:parameter/Solutions/SO0111/sendAnonymizedMetrics", + "DataType": "text", + } +} + + +# ------------------------------------------------------------------------------ +# This test verifies that the metrics object is constructed correctly +# ------------------------------------------------------------------------------ +def test_cw_metrics_construction(mocker): + ssmc = boto3.client("ssm", region_name=get_region()) + ssmc_s = Stubber(ssmc) + ssmc_s.add_response( + "get_parameter", mock_ssm_get_parameter_send_cloudwatch_metrics_yes + ) + ssmc_s.activate() + + mocker.patch( + "layer.cloudwatch_metrics.CloudWatchMetrics.init_ssm_client", return_value=ssmc + ) + + metrics = CloudWatchMetrics() + + 
assert metrics.metrics_enabled is True + + +# ------------------------------------------------------------------------------ +# This test verifies that sendAnonymizedMetrics defaults to no when the value is +# other than yes or no. +# ------------------------------------------------------------------------------ +def test_validate_ambiguous_sendanonymousmetrics(mocker): + ssmc = boto3.client("ssm", region_name=get_region()) + ssmc_s = Stubber(ssmc) + ssmc_s.add_response( + "get_parameter", mock_ssm_get_parameter_send_cloudwatch_bad_value + ) + ssmc_s.activate() + + mocker.patch( + "layer.cloudwatch_metrics.CloudWatchMetrics.init_ssm_client", return_value=ssmc + ) + + metrics = CloudWatchMetrics() + + assert metrics.send_cloudwatch_metrics_enabled() is False + + +# ------------------------------------------------------------------------------ +# This test verifies that send_metrics will post metrics when enabled via ssm +# ------------------------------------------------------------------------------ +def test_send_metrics(mocker): + event_state = "SUCCESS" + + ssmc = boto3.client("ssm", region_name=get_region()) + ssmc_s = Stubber(ssmc) + ssmc_s.add_response( + "get_parameter", mock_ssm_get_parameter_send_cloudwatch_metrics_yes + ) + ssmc_s.activate() + + mocker.patch( + "layer.cloudwatch_metrics.CloudWatchMetrics.init_ssm_client", return_value=ssmc + ) + + cloudwatch = boto3.client("cloudwatch") + cloudwatch_s = Stubber(cloudwatch) + cloudwatch_s.add_response("put_metric_data", {}) + cloudwatch_s.activate() + + mocker.patch( + "layer.cloudwatch_metrics.CloudWatchMetrics.init_cloudwatch_client", + return_value=cloudwatch, + ) + + metrics = CloudWatchMetrics() + metric_data = { + "MetricName": "Remediations", + "Dimensions": [ + { + "Name": "Outcome", + "Value": event_state, + }, + ], + "Unit": "Count", + "Value": 1, + } + + metrics.send_metric(metric_data) + cloudwatch_s.assert_no_pending_responses() diff --git a/source/LambdaLayers/test/test_json_data/CIS-1.3.json 
b/source/layer/test/test_json_data/CIS-1.3.json similarity index 100% rename from source/LambdaLayers/test/test_json_data/CIS-1.3.json rename to source/layer/test/test_json_data/CIS-1.3.json diff --git a/source/LambdaLayers/test/test_json_data/CIS-1.4.json b/source/layer/test/test_json_data/CIS-1.4.json similarity index 100% rename from source/LambdaLayers/test/test_json_data/CIS-1.4.json rename to source/layer/test/test_json_data/CIS-1.4.json diff --git a/source/LambdaLayers/test/test_json_data/CIS-bad.json b/source/layer/test/test_json_data/CIS-bad.json similarity index 100% rename from source/LambdaLayers/test/test_json_data/CIS-bad.json rename to source/layer/test/test_json_data/CIS-bad.json diff --git a/source/LambdaLayers/test/test_json_data/CIS_1-6-multi-select.json b/source/layer/test/test_json_data/CIS_1-6-multi-select.json similarity index 100% rename from source/LambdaLayers/test/test_json_data/CIS_1-6-multi-select.json rename to source/layer/test/test_json_data/CIS_1-6-multi-select.json diff --git a/source/LambdaLayers/test/test_json_data/CIS_1-6.json b/source/layer/test/test_json_data/CIS_1-6.json similarity index 100% rename from source/LambdaLayers/test/test_json_data/CIS_1-6.json rename to source/layer/test/test_json_data/CIS_1-6.json diff --git a/source/LambdaLayers/test/test_json_data/CIS_unsupversion.json b/source/layer/test/test_json_data/CIS_unsupversion.json similarity index 100% rename from source/LambdaLayers/test/test_json_data/CIS_unsupversion.json rename to source/layer/test/test_json_data/CIS_unsupversion.json diff --git a/source/LambdaLayers/test/test_json_data/afsbp-ec2.7.json b/source/layer/test/test_json_data/afsbp-ec2.7.json similarity index 100% rename from source/LambdaLayers/test/test_json_data/afsbp-ec2.7.json rename to source/layer/test/test_json_data/afsbp-ec2.7.json diff --git a/source/LambdaLayers/test/test_json_data/custom-action-mismatch.json b/source/layer/test/test_json_data/custom-action-mismatch.json similarity index 
100% rename from source/LambdaLayers/test/test_json_data/custom-action-mismatch.json rename to source/layer/test/test_json_data/custom-action-mismatch.json diff --git a/source/LambdaLayers/test/test_logger.py b/source/layer/test/test_logger.py similarity index 68% rename from source/LambdaLayers/test/test_logger.py rename to source/layer/test/test_logger.py index c1a37f9f..3dfcbd9a 100644 --- a/source/LambdaLayers/test/test_logger.py +++ b/source/layer/test/test_logger.py @@ -1,21 +1,23 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import pytest -from pytest_mock import mocker -from logger import Logger +from layer.logger import Logger + def test_logger_init_debug(): - logger_test = Logger(loglevel='debug') + logger_test = Logger(loglevel="debug") assert logger_test.log.getEffectiveLevel() == 10 + def test_logger_init_info(): - logger_test = Logger(loglevel='info') + logger_test = Logger(loglevel="info") assert logger_test.log.getEffectiveLevel() == 20 + def test_logger_init_warning(): - logger_test = Logger(loglevel='warning') + logger_test = Logger(loglevel="warning") assert logger_test.log.getEffectiveLevel() == 30 + # TODO # 1. Add a test for DateTimeEncoder # 2. Add a test for _format diff --git a/source/layer/test/test_metrics.py b/source/layer/test/test_metrics.py new file mode 100644 index 00000000..b87cc46e --- /dev/null +++ b/source/layer/test/test_metrics.py @@ -0,0 +1,241 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import os + +import boto3 +from botocore.stub import Stubber +from layer.metrics import Metrics + +from . 
import file_utilities as utils + +test_data = "test/test_json_data/" + + +def get_region(): + return os.getenv("AWS_DEFAULT_REGION") + + +mock_ssm_get_parameter_uuid = { + "Parameter": { + "Name": "/Solutions/SO0111/anonymous_metrics_uuid", + "Type": "String", + "Value": "11111111-1111-1111-1111-111111111111", + "Version": 1, + "LastModifiedDate": "2021-02-25T12:58:50.591000-05:00", + "ARN": f"arn:aws:ssm:{get_region()}:111111111111:parameter/Solutions/SO0111/anonymous_metrics_uuid", + "DataType": "text", + } +} +mock_ssm_get_parameter_version = { + "Parameter": { + "Name": "/Solutions/SO0111/solution_version", + "Type": "String", + "Value": "v1.2.0TEST", + "Version": 1, + "LastModifiedDate": "2021-02-25T12:58:50.591000-05:00", + "ARN": f"arn:aws:ssm:{get_region()}1:111111111111:parameter/Solutions/SO0111/solution_version", + "DataType": "text", + } +} + +mock_ssm_get_parameter_sendmetrics_yes = { + "Parameter": { + "Name": "/Solutions/SO0111/sendAnonymizedMetrics", + "Type": "String", + "Value": "Yes", + "Version": 1, + "LastModifiedDate": "2021-02-25T12:58:50.591000-05:00", + "ARN": f"arn:aws:ssm:{get_region()}:111111111111:parameter/Solutions/SO0111/sendAnonymizedMetrics", + "DataType": "text", + } +} + +mock_ssm_get_parameter_sendmetrics_no = { + "Parameter": { + "Name": "/Solutions/SO0111/sendAnonymizedMetrics", + "Type": "String", + "Value": "No", + "Version": 1, + "LastModifiedDate": "2021-02-25T12:58:50.591000-05:00", + "ARN": f"arn:aws:ssm:{get_region()}:111111111111:parameter/Solutions/SO0111/sendAnonymizedMetrics", + "DataType": "text", + } +} + +mock_ssm_get_parameter_sendmetrics_badvalue = { + "Parameter": { + "Name": "/Solutions/SO0111/sendAnonymizedMetrics", + "Type": "String", + "Value": "slartibartfast", + "Version": 1, + "LastModifiedDate": "2021-02-25T12:58:50.591000-05:00", + "ARN": f"arn:aws:ssm:{get_region()}:111111111111:parameter/Solutions/SO0111/sendAnonymizedMetrics", + "DataType": "text", + } +} + + +# 
------------------------------------------------------------------------------ +# This test verifies that the metrics object is constructed correctly +# ------------------------------------------------------------------------------ +def test_metrics_construction(mocker): + ssmc = boto3.client("ssm", region_name=get_region()) + ssmc_s = Stubber(ssmc) + ssmc_s.add_response("get_parameter", mock_ssm_get_parameter_sendmetrics_yes) + ssmc_s.add_response("get_parameter", mock_ssm_get_parameter_uuid) + ssmc_s.add_response("get_parameter", mock_ssm_get_parameter_version) + ssmc_s.activate() + + mocker.patch("layer.metrics.Metrics.connect_to_ssm", return_value=ssmc) + + metrics = Metrics("unit-test") + + assert metrics.solution_uuid == "11111111-1111-1111-1111-111111111111" + assert metrics.solution_version == "v1.2.0TEST" + + +# ------------------------------------------------------------------------------ +# This test verifies that event data is parsed correctly +# ------------------------------------------------------------------------------ +def test_get_metrics_from_finding(mocker): + expected_response = { + "generator_id": "arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/1.3", + "type": "1.3 Ensure credentials unused for 90 days or greater are disabled", + "productArn": "arn:aws:securityhub:" + + get_region() + + "::product/aws/securityhub", + "finding_triggered_by": "unit-test", + "region": mocker.ANY, + } + + finding = ( + utils.load_test_data(test_data + "CIS-1.3.json", get_region()) + .get("detail") + .get("findings")[0] + ) + + ssmc = boto3.client("ssm", region_name=get_region()) + ssmc_s = Stubber(ssmc) + ssmc_s.add_response("get_parameter", mock_ssm_get_parameter_sendmetrics_yes) + ssmc_s.add_response("get_parameter", mock_ssm_get_parameter_uuid) + ssmc_s.add_response("get_parameter", mock_ssm_get_parameter_version) + ssmc_s.activate() + + mocker.patch("layer.metrics.Metrics.connect_to_ssm", return_value=ssmc) + + metrics = 
Metrics("unit-test") + + assert metrics.get_metrics_from_finding(finding) == expected_response + + +# ------------------------------------------------------------------------------ +# This test verifies that sendAnonymizedMetrics defaults to no when the value is +# other than yes or no. +# ------------------------------------------------------------------------------ +def test_validate_ambiguous_sendanonymousmetrics(mocker): + ssmc = boto3.client("ssm", region_name=get_region()) + ssmc_s = Stubber(ssmc) + ssmc_s.add_response("get_parameter", mock_ssm_get_parameter_sendmetrics_badvalue) + ssmc_s.add_response("get_parameter", mock_ssm_get_parameter_uuid) + ssmc_s.add_response("get_parameter", mock_ssm_get_parameter_version) + ssmc_s.activate() + + mocker.patch("layer.metrics.Metrics.connect_to_ssm", return_value=ssmc) + + metrics = Metrics("unit-test") + + assert metrics.send_anonymous_metrics_enabled() is False + + +# ------------------------------------------------------------------------------ +# This test verifies that send_metrics will post metrics when enabled via ssm +# ------------------------------------------------------------------------------ +def test_send_metrics(mocker): + expected_response = { + "Solution": "SO0111", + "UUID": "11111111-1111-1111-1111-111111111111", + "TimeStamp": mocker.ANY, + "Data": { + "generator_id": "arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/1.3", + "type": "1.3 Ensure credentials unused for 90 days or greater are disabled", + "productArn": mocker.ANY, + "finding_triggered_by": "unit-test", + "region": mocker.ANY, + }, + "Version": "v1.2.0TEST", + } + + finding = ( + utils.load_test_data(test_data + "CIS-1.3.json", get_region()) + .get("detail") + .get("findings")[0] + ) + + ssmc = boto3.client("ssm", region_name=get_region()) + ssmc_s = Stubber(ssmc) + ssmc_s.add_response("get_parameter", mock_ssm_get_parameter_sendmetrics_yes) + ssmc_s.add_response("get_parameter", mock_ssm_get_parameter_uuid) + 
ssmc_s.add_response("get_parameter", mock_ssm_get_parameter_version) + ssmc_s.add_response("get_parameter", mock_ssm_get_parameter_sendmetrics_yes) + ssmc_s.activate() + + mocker.patch("layer.metrics.Metrics.connect_to_ssm", return_value=ssmc) + + metrics = Metrics("unit-test") + metrics_data = metrics.get_metrics_from_finding(finding) + assert metrics_data == { + "generator_id": "arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/1.3", + "type": "1.3 Ensure credentials unused for 90 days or greater are disabled", + "productArn": f"arn:aws:securityhub:{get_region()}::product/aws/securityhub", + "finding_triggered_by": "unit-test", + "region": get_region(), + } + + send_metrics = mocker.patch( + "layer.metrics.Metrics.post_metrics_to_api", return_value=None + ) + + metrics.send_metrics(metrics_data) + + send_metrics.assert_called_with(expected_response) + + +# ------------------------------------------------------------------------------ +# This test verifies that send_metrics takes the value from the SSM parameter +# WHEN METRICS ARE SENT. It does not assume that if the metrics object exists +# then send metrics is enabled. 
+# ------------------------------------------------------------------------------ +def test_do_not_send_metrics(mocker): + finding = ( + utils.load_test_data(test_data + "CIS-1.3.json", get_region()) + .get("detail") + .get("findings")[0] + ) + + ssmc = boto3.client("ssm", region_name=get_region()) + ssmc_s = Stubber(ssmc) + ssmc_s.add_response("get_parameter", mock_ssm_get_parameter_sendmetrics_yes) + ssmc_s.add_response("get_parameter", mock_ssm_get_parameter_uuid) + ssmc_s.add_response("get_parameter", mock_ssm_get_parameter_version) + ssmc_s.add_response("get_parameter", mock_ssm_get_parameter_sendmetrics_no) + ssmc_s.activate() + + mocker.patch("layer.metrics.Metrics.connect_to_ssm", return_value=ssmc) + + metrics = Metrics("unit-test") + metrics_data = metrics.get_metrics_from_finding(finding) + assert metrics_data == { + "generator_id": "arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/1.3", + "type": "1.3 Ensure credentials unused for 90 days or greater are disabled", + "productArn": f"arn:aws:securityhub:{get_region()}::product/aws/securityhub", + "finding_triggered_by": "unit-test", + "region": get_region(), + } + + send_metrics = mocker.patch( + "layer.metrics.Metrics.post_metrics_to_api", return_value=None + ) + + metrics.send_metrics(metrics_data) + + send_metrics.assert_not_called() diff --git a/source/layer/test/test_sechub_findings.py b/source/layer/test/test_sechub_findings.py new file mode 100644 index 00000000..d431db1c --- /dev/null +++ b/source/layer/test/test_sechub_findings.py @@ -0,0 +1,297 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Simple test to validate that the request format coming from the Cfn template +will turn into a valid API call. 
+""" +import json + +import boto3 +import layer.sechub_findings as findings +import pytest +from botocore.stub import Stubber +from layer.logger import Logger + +log_level = "info" +logger = Logger(loglevel=log_level) +test_data = "test/test_json_data/" + +my_session = boto3.session.Session() +my_region = my_session.region_name + + +# ------------------------------------------------------------------------------ +# CIS v1.2.0 +# ------------------------------------------------------------------------------ +def test_parse_cis_v120(mocker): + test_data_in = open(test_data + "CIS-1.3.json") + event = json.loads(test_data_in.read()) + test_data_in.close() + + ssmclient = boto3.client("ssm") + stubbed_ssm_client = Stubber(ssmclient) + stubbed_ssm_client.add_response( + "get_parameter", + { + "Parameter": { + "Name": "/Solutions/SO0111/cis-aws-foundations-benchmark/1.2.0/shortname", + "Type": "String", + "Value": "CIS", + "Version": 1, + "LastModifiedDate": "2021-04-23T08:11:30.658000-04:00", + "ARN": f"arn:aws:ssm:{my_region}:111111111111:parameter/Solutions/SO0111/cis-aws-foundations-benchmark/1.2.0/shortname", + "DataType": "text", + } + }, + ) + stubbed_ssm_client.add_client_error( + "get_parameter", "ParameterNotFound", "The requested parameter does not exist" + ) + stubbed_ssm_client.add_response( + "get_parameter", + { + "Parameter": { + "Name": "/Solutions/SO0111/cis-aws-foundations-benchmark/1.2.0", + "Type": "String", + "Value": "enabled", + "Version": 1, + "LastModifiedDate": "2021-04-23T08:12:13.893000-04:00", + "ARN": f"arn:aws:ssm:{my_region}:111111111111:parameter/Solutions/SO0111/cis-aws-foundations-benchmark/version", + "DataType": "text", + } + }, + ) + stubbed_ssm_client.activate() + + mocker.patch("layer.sechub_findings.get_ssm_connection", return_value=ssmclient) + + finding = findings.Finding(event["detail"]["findings"][0]) + assert finding.details.get("Id") == event["detail"]["findings"][0]["Id"] + assert ( + finding.generator_id + == 
"arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/1.3" + ) + assert finding.account_id == "111111111111" + assert finding.standard_name == "cis-aws-foundations-benchmark" + assert finding.standard_shortname == "CIS" + assert finding.standard_version == "1.2.0" + assert finding.standard_control == "1.3" + assert finding.standard_version_supported == "True" + + stubbed_ssm_client.deactivate() + + +# ------------------------------------------------------------------------------ +# +# ------------------------------------------------------------------------------ +def test_parse_bad_imported(): + test_file = open(test_data + "CIS-bad.json") + event = json.loads(test_file.read()) + test_file.close() + + with pytest.raises(findings.InvalidFindingJson): + findings.Finding(event["detail"]["findings"][0]) + + +# ------------------------------------------------------------------------------ +# CIS v1.7.0 finding should show unsupported +# ------------------------------------------------------------------------------ +def test_parse_unsupported_version(mocker): + test_data_in = open(test_data + "CIS_unsupversion.json") + event = json.loads(test_data_in.read()) + test_data_in.close() + + ssmclient = boto3.client("ssm") + stubbed_ssm_client = Stubber(ssmclient) + + stubbed_ssm_client.add_response( + "get_parameter", + { + "Parameter": { + "Name": "/Solutions/SO0111/cis-aws-foundations-benchmark/1.7.0/shortname", + "Type": "String", + "Value": "CIS", + "Version": 1, + "LastModifiedDate": "2021-04-23T08:11:30.658000-04:00", + "ARN": f"arn:aws:ssm:{my_region}:111111111111:parameter/Solutions/SO0111/cis-aws-foundations-benchmark/1.7.0/shortname", + "DataType": "text", + } + }, + ) + + stubbed_ssm_client.add_client_error( + "get_parameter", "ParameterNotFound", "The requested parameter does not exist" + ) + stubbed_ssm_client.activate() + + mocker.patch("layer.sechub_findings.get_ssm_connection", return_value=ssmclient) + + finding = 
findings.Finding(event["detail"]["findings"][0]) + + assert finding.details.get("Id") == event["detail"]["findings"][0]["Id"] + assert ( + finding.generator_id + == "arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.7.0/rule/1.6" + ) + assert finding.account_id == "111111111111" + assert finding.standard_name == "cis-aws-foundations-benchmark" + assert finding.standard_shortname == "CIS" + assert finding.standard_version == "1.7.0" + assert finding.standard_control == "1.6" + assert finding.standard_version_supported == "False" + + stubbed_ssm_client.deactivate() + + +# ------------------------------------------------------------------------------ +# AFSBP v1.0.0 +# ------------------------------------------------------------------------------ +def test_parse_afsbp_v100(mocker): + test_data_in = open(test_data + "afsbp-ec2.7.json") + event = json.loads(test_data_in.read()) + test_data_in.close() + + ssmclient = boto3.client("ssm") + stubbed_ssm_client = Stubber(ssmclient) + + stubbed_ssm_client.add_response( + "get_parameter", + { + "Parameter": { + "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname", + "Type": "String", + "Value": "AFSBP", + "Version": 1, + "LastModifiedDate": "2021-04-23T08:11:30.658000-04:00", + "ARN": f"arn:aws:ssm:{my_region}:111111111111:parameter/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0/shortname", + "DataType": "text", + } + }, + ) + stubbed_ssm_client.add_client_error( + "get_parameter", "ParameterNotFound", "The requested parameter does not exist" + ) + stubbed_ssm_client.add_response( + "get_parameter", + { + "Parameter": { + "Name": "/Solutions/SO0111/aws-foundational-security-best-practices/1.0.0", + "Type": "String", + "Value": "enabled", + "Version": 1, + "LastModifiedDate": "2021-04-23T08:12:13.893000-04:00", + "ARN": f"arn:aws:ssm:us-{my_region}-1:111111111111:parameter/Solutions/SO0111/aws-foundational-security-best-practices/version", + "DataType": "text", + } 
+ }, + ) + stubbed_ssm_client.activate() + + mocker.patch("layer.sechub_findings.get_ssm_connection", return_value=ssmclient) + + finding = findings.Finding(event["detail"]["findings"][0]) + assert finding.details.get("Id") == event["detail"]["findings"][0]["Id"] + assert finding.account_id == "111111111111" + assert finding.standard_name == "aws-foundational-security-best-practices" + assert finding.standard_shortname == "AFSBP" + assert finding.standard_version == "1.0.0" + assert finding.standard_control == "EC2.7" + assert finding.standard_version_supported == "True" + + stubbed_ssm_client.deactivate() + + +# ------------------------------------------------------------------------------ +# Security Standard not found +# ------------------------------------------------------------------------------ +def test_undefined_security_standard(mocker): + test_data_in = open(test_data + "afsbp-ec2.7.json") + event = json.loads(test_data_in.read()) + test_data_in.close() + + event["detail"]["findings"][0]["ProductFields"][ + "StandardsControlArn" + ] = "arn:aws:securityhub:::standards/aws-invalid-security-standard/v/1.2.3/ABC.1" + + ssmclient = boto3.client("ssm") + stubbed_ssm_client = Stubber(ssmclient) + + stubbed_ssm_client.add_client_error( + "get_parameter", "ParameterNotFound", "The requested parameter does not exist" + ) + + stubbed_ssm_client.add_client_error( + "get_parameter", "ParameterNotFound", "The requested parameter does not exist" + ) + + stubbed_ssm_client.add_client_error( + "get_parameter", "ParameterNotFound", "The requested parameter does not exist" + ) + + stubbed_ssm_client.activate() + + mocker.patch("layer.sechub_findings.get_ssm_connection", return_value=ssmclient) + + finding = findings.Finding(event["detail"]["findings"][0]) + assert finding.details.get("Id") == event["detail"]["findings"][0]["Id"] + assert finding.account_id == "111111111111" + assert finding.standard_name == "aws-invalid-security-standard" + assert 
finding.standard_shortname == "error" + assert finding.security_standard == "notfound" + assert finding.standard_version == "1.2.3" + assert finding.standard_control == "ABC.1" + assert finding.standard_version_supported == "False" + + stubbed_ssm_client.deactivate() + + +def test_security_control(mocker): + test_data_in = open(test_data + "afsbp-ec2.7.json") + event = json.loads(test_data_in.read()) + test_data_in.close() + + event["detail"]["findings"][0]["ProductFields"]["StandardsControlArn"] = None + event["detail"]["findings"][0]["Compliance"]["SecurityControlId"] = "EC2.7" + + ssmclient = boto3.client("ssm") + stubbed_ssm_client = Stubber(ssmclient) + + stubbed_ssm_client.add_response( + "get_parameter", + { + "Parameter": { + "Name": "/Solutions/SO0111/security-controls/2.0.0/shortname", + "Type": "String", + "Value": "SC", + "Version": 1, + "LastModifiedDate": "2021-04-23T08:11:30.658000-04:00", + "ARN": f"arn:aws:ssm:{my_region}:111111111111:parameter/Solutions/SO0111/security-controls/2.0.0/shortname", + "DataType": "text", + } + }, + ) + stubbed_ssm_client.add_client_error( + "get_parameter", "ParameterNotFound", "The requested parameter does not exist" + ) + stubbed_ssm_client.add_response( + "get_parameter", + { + "Parameter": { + "Name": "/Solutions/SO0111/security-controls/2.0.0/status", + "Type": "String", + "Value": "enabled", + "Version": 1, + "LastModifiedDate": "2021-04-23T08:12:13.893000-04:00", + "ARN": f"arn:aws:ssm:us-{my_region}-1:111111111111:parameter/Solutions/SO0111/security-controls/2.0.0/status", + "DataType": "text", + } + }, + ) + stubbed_ssm_client.activate() + + finding = findings.Finding(event["detail"]["findings"][0]) + assert finding.details.get("Id") == event["detail"]["findings"][0]["Id"] + assert finding.account_id == "111111111111" + assert finding.standard_name == "security-control" + assert finding.standard_version == "2.0.0" + assert finding.standard_control == "EC2.7" diff --git a/source/layer/test/test_utils.py 
b/source/layer/test/test_utils.py new file mode 100644 index 00000000..edaa69e7 --- /dev/null +++ b/source/layer/test/test_utils.py @@ -0,0 +1,20 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +from layer.utils import partition_from_region, resource_from_arn + + +def test_resource_from_arn(): + testarn1 = "arn:aws-us-gov:iam:us-gov-west-1:222222222222:root" + assert resource_from_arn(testarn1) == "root" + testarn2 = "arn:aws-cn:s3:::doc-example-bucket" + assert resource_from_arn(testarn2) == "doc-example-bucket" + testarn3 = "This is a non-arn string" + assert resource_from_arn(testarn3) == "This is a non-arn string" + + +def test_partition_from_region(): + assert partition_from_region("us-gov-west-1") == "aws-us-gov" + assert partition_from_region("cn-north-1") == "aws-cn" + # Note: does not validate region name. default expected + assert partition_from_region("foo") == "aws" + assert partition_from_region("eu-west-1") == "aws" diff --git a/source/layer/utils.py b/source/layer/utils.py new file mode 100644 index 00000000..e13d52b5 --- /dev/null +++ b/source/layer/utils.py @@ -0,0 +1,132 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import json +import os +import re +from typing import Any + +import boto3 +from botocore.exceptions import UnknownRegionError +from layer.awsapi_cached_client import AWSCachedClient + +AWS_REGION = os.getenv("AWS_REGION", "us-east-1") + +properties = [ + "status", + "message", + "executionid", + "affected_object", + "remediation_status", + "logdata", + "securitystandard", + "securitystandardversion", + "standardsupported", + "controlid", + "accountid", + "automationdocid", + "remediationrole", + "workflowdoc", + "workflowaccount", + "workflowrole", + "eventtype", + "resourceregion", + "workflow_data", + "executionaccount", + "executionregion", +] + + +class StepFunctionLambdaAnswer: + """ + Maintains a hash of AWS API Client connections by region and service + """ + + status = "init" + message = "" + executionid = "" + affected_object = "" + remediation_status = "" + logdata: Any = [] + securitystandard = "" + securitystandardversion = "" + standardsupported = "" + controlid = "" + accountid = "" + automationdocid = "" + remediationrole = "" + workflowdoc = "" + workflowaccount = "" + eventtype = "" + resourceregion = "" + workflow_data: dict[str, str] = ( + {} + ) # Hash for workflow data so that it can be modified in + # in the future without changing the source code + + def __init__(self): + """Set message and status - minimum required fields""" + self.status = "" + self.message = "" + self.remediation_status = "" + self.logdata = [] + + def __str__(self): + return json.dumps(self.__dict__) + + def json(self): + return self.__dict__ + + def update(self, answer_data): + for property, value in answer_data.items(): + if property in properties: + setattr(self, property, value) + + +def resource_from_arn(arn): + """ + Strip off the leading parts of the ARN: arn:*:*:*:*: + Return what's left. If no match, return the original predicate. 
+ """ + arn_pattern = re.compile(r"arn\:[\w,-]+:[\w,-]+:.*:\d*:(.*)") + arn_match = arn_pattern.match(arn) + answer = arn + if arn_match: + answer = arn_match.group(1) + return answer + + +def partition_from_region(region_name): + """ + returns the partition for a given region + On success returns a string + On failure returns aws + """ + partition = "" + session = boto3.Session() + try: + partition = session.get_partition_for_region(region_name) + except UnknownRegionError: + return "aws" + + return partition + + +def publish_to_sns(topic_name, message, region=""): + """ + Post a message to an SNS topic + """ + if not region: + region = AWS_REGION + partition = partition_from_region(region) + AWS = AWSCachedClient(region) # cached client object + account = boto3.client("sts").get_caller_identity()["Account"] + + topic_arn = f"arn:{partition}:sns:{region}:{account}:{topic_name}" + + message_id = ( + AWS.get_connection("sns", region) + .publish(TopicArn=topic_arn, Message=message) + .get("MessageId", "error") + ) + + return message_id diff --git a/source/lib/__snapshots__/member-stack.test.ts.snap b/source/lib/__snapshots__/member-stack.test.ts.snap index 6e752102..ffb123cc 100644 --- a/source/lib/__snapshots__/member-stack.test.ts.snap +++ b/source/lib/__snapshots__/member-stack.test.ts.snap @@ -11,6 +11,18 @@ exports[`member stack snapshot matches 1`] = ` "yes", ], }, + "ShouldDeployAppReg": { + "Fn::Not": [ + { + "Fn::Equals": [ + { + "Ref": "AWS::Partition", + }, + "aws-cn", + ], + }, + ], + }, "loadAFSBPCond": { "Fn::Equals": [ { @@ -35,6 +47,14 @@ exports[`member stack snapshot matches 1`] = ` "yes", ], }, + "loadNIST80053Cond": { + "Fn::Equals": [ + { + "Ref": "LoadNIST80053MemberStack", + }, + "yes", + ], + }, "loadPCI321Cond": { "Fn::Equals": [ { @@ -89,6 +109,7 @@ exports[`member stack snapshot matches 1`] = ` "LoadAFSBPMemberStack", "LoadCIS120MemberStack", "LoadCIS140MemberStack", + "LoadNIST80053MemberStack", "LoadPCI321MemberStack", 
"LoadSCMemberStack", ], @@ -138,6 +159,15 @@ exports[`member stack snapshot matches 1`] = ` "Description": "Load Playbook member stack for CIS140?", "Type": "String", }, + "LoadNIST80053MemberStack": { + "AllowedValues": [ + "yes", + "no", + ], + "Default": "yes", + "Description": "Load Playbook member stack for NIST80053?", + "Type": "String", + }, "LoadPCI321MemberStack": { "AllowedValues": [ "yes", @@ -168,6 +198,7 @@ exports[`member stack snapshot matches 1`] = ` }, "Resources": { "AppRegistry968496A3": { + "Condition": "ShouldDeployAppReg", "Properties": { "Description": "Service Catalog application to track and manage all your resources for the solution automated-security-response-on-aws", "Name": { @@ -227,6 +258,7 @@ exports[`member stack snapshot matches 1`] = ` "Type": "AWS::ServiceCatalogAppRegistry::Application", }, "AppRegistryAssociation": { + "Condition": "ShouldDeployAppReg", "Properties": { "Application": { "Fn::GetAtt": [ @@ -242,6 +274,7 @@ exports[`member stack snapshot matches 1`] = ` "Type": "AWS::ServiceCatalogAppRegistry::ResourceAssociation", }, "AppRegistryAttributeGroupAssociation8045a8dd9527B814DC7A": { + "Condition": "ShouldDeployAppReg", "Properties": { "Application": { "Fn::GetAtt": [ @@ -259,6 +292,7 @@ exports[`member stack snapshot matches 1`] = ` "Type": "AWS::ServiceCatalogAppRegistry::AttributeGroupAssociation", }, "DefaultApplicationAttributesFC1CC26B": { + "Condition": "ShouldDeployAppReg", "Properties": { "Attributes": { "applicationType": { @@ -342,6 +376,38 @@ exports[`member stack snapshot matches 1`] = ` }, "Type": "AWS::CloudFormation::WaitConditionHandle", }, + "NestedStackFactoryGatePlaybookMemberStackNIST80053C3D22DE7": { + "Metadata": { + "PlaybookMemberStackAFSBPReady": { + "Fn::If": [ + "loadAFSBPCond", + { + "Ref": "PlaybookMemberStackAFSBP", + }, + "", + ], + }, + "PlaybookMemberStackCIS120Ready": { + "Fn::If": [ + "loadCIS120Cond", + { + "Ref": "PlaybookMemberStackCIS120", + }, + "", + ], + }, + 
"PlaybookMemberStackCIS140Ready": { + "Fn::If": [ + "loadCIS140Cond", + { + "Ref": "PlaybookMemberStackCIS140", + }, + "", + ], + }, + }, + "Type": "AWS::CloudFormation::WaitConditionHandle", + }, "NestedStackFactoryGatePlaybookMemberStackPCI3214A12B906": { "Metadata": { "PlaybookMemberStackAFSBPReady": { @@ -371,6 +437,15 @@ exports[`member stack snapshot matches 1`] = ` "", ], }, + "PlaybookMemberStackNIST80053Ready": { + "Fn::If": [ + "loadNIST80053Cond", + { + "Ref": "PlaybookMemberStackNIST80053", + }, + "", + ], + }, }, "Type": "AWS::CloudFormation::WaitConditionHandle", }, @@ -403,6 +478,15 @@ exports[`member stack snapshot matches 1`] = ` "", ], }, + "PlaybookMemberStackNIST80053Ready": { + "Fn::If": [ + "loadNIST80053Cond", + { + "Ref": "PlaybookMemberStackNIST80053", + }, + "", + ], + }, "PlaybookMemberStackPCI321Ready": { "Fn::If": [ "loadPCI321Cond", @@ -555,6 +639,53 @@ exports[`member stack snapshot matches 1`] = ` "Type": "AWS::CloudFormation::Stack", "UpdateReplacePolicy": "Delete", }, + "PlaybookMemberStackNIST80053": { + "Condition": "loadNIST80053Cond", + "DeletionPolicy": "Delete", + "DependsOn": [ + "NestedStackFactoryGatePlaybookMemberStackNIST80053C3D22DE7", + "RunbookStackNoRoles", + ], + "Properties": { + "Parameters": { + "SecHubAdminAccount": { + "Ref": "SecHubAdminAccount", + }, + "WaitProviderServiceToken": { + "Fn::GetAtt": [ + "WaitProviderFunction3D90ED36", + "Arn", + ], + }, + }, + "TemplateURL": { + "Fn::Join": [ + "", + [ + "https://", + { + "Fn::FindInMap": [ + "NestedStackFactorySourceCodeA11A36A7", + "General", + "S3Bucket", + ], + }, + "-reference.s3.amazonaws.com/", + { + "Fn::FindInMap": [ + "NestedStackFactorySourceCodeA11A36A7", + "General", + "KeyPrefix", + ], + }, + "/playbooks/NIST80053MemberStack.template", + ], + ], + }, + }, + "Type": "AWS::CloudFormation::Stack", + "UpdateReplacePolicy": "Delete", + }, "PlaybookMemberStackPCI321": { "Condition": "loadPCI321Cond", "DeletionPolicy": "Delete", @@ -725,6 +856,9 @@ 
exports[`member stack snapshot matches 1`] = ` "IgnorePublicAcls": true, "RestrictPublicBuckets": true, }, + "VersioningConfiguration": { + "Status": "Enabled", + }, }, "Type": "AWS::S3::Bucket", "UpdateReplacePolicy": "Retain", @@ -943,7 +1077,7 @@ exports[`member stack snapshot matches 1`] = ` }, "SSMParameterForS34EncryptionKeyAlias73DD8A98": { "Properties": { - "Description": "Parameter to store encryption key alias for the PCI.S3.4/AFSBP.S3.4, replace the default value with the KMS Key Alias, other wise the remediation will enable the default AES256 encryption for the bucket.", + "Description": "Parameter to store encryption key alias for the PCI.S3.4/FSBP.S3.4, replace the default value with the KMS Key Alias, other wise the remediation will enable the default AES256 encryption for the bucket.", "Name": "/Solutions/SO9999/afsbp/1.0.0/S3.4/KmsKeyAlias", "Type": "String", "Value": "default-s3-encryption", @@ -956,7 +1090,7 @@ exports[`member stack snapshot matches 1`] = ` "S3BucketForRedShiftAuditLogging652E7355", ], "Properties": { - "Description": "Parameter to store the S3 bucket name for the remediation AFSBP.REDSHIFT.4, the default value is bucket-name which has to be updated by the user before using the remediation.", + "Description": "Parameter to store the S3 bucket name for the remediation FSBP.REDSHIFT.4, the default value is bucket-name which has to be updated by the user before using the remediation.", "Name": "/Solutions/SO9999/afsbp/1.0.0/REDSHIFT.4/S3BucketNameForAuditLogging", "Type": "String", "Value": { @@ -980,16 +1114,6 @@ exports[`member stack snapshot matches 1`] = ` "DependsOn": [ "WaitProviderRole83B0295F", ], - "Metadata": { - "cdk_nag": { - "rules_to_suppress": [ - { - "id": "AwsSolutions-L1", - "reason": "Will upgrade in next release to prioritize patch", - }, - ], - }, - }, "Properties": { "Code": { "S3Bucket": { diff --git a/source/lib/appregistry/applyAppRegistry.ts b/source/lib/appregistry/applyAppRegistry.ts index 
b96760b8..d499a4a1 100644 --- a/source/lib/appregistry/applyAppRegistry.ts +++ b/source/lib/appregistry/applyAppRegistry.ts @@ -1,9 +1,15 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import { Aws, CfnMapping, Fn, Stack } from 'aws-cdk-lib'; +import { Aspects, Aws, CfnCondition, CfnMapping, Fn, Stack } from 'aws-cdk-lib'; import { Application, AttributeGroup } from '@aws-cdk/aws-servicecatalogappregistry-alpha'; import { applyTag } from '../tags/applyTag'; -import { CfnResourceAssociation } from 'aws-cdk-lib/aws-servicecatalogappregistry'; +import { + CfnAttributeGroup, + CfnAttributeGroupAssociation, + CfnResourceAssociation, +} from 'aws-cdk-lib/aws-servicecatalogappregistry'; +import { ConditionAspect } from '../cdk-helper/condition-aspect'; +import setCondition from '../cdk-helper/set-condition'; export interface AppRegisterProps { solutionId: string; @@ -19,6 +25,7 @@ export class AppRegister { private solutionVersion: string; private appRegistryApplicationName: string; private applicationType: string; + private shouldDeployAppRegCondition?: CfnCondition; constructor(props: AppRegisterProps) { this.solutionId = props.solutionId; @@ -36,19 +43,25 @@ export class AppRegister { * Do not create ApplicationInsights. This may sometimes fail. 
*/ public applyAppRegistryToStacks(hubStack: Stack, nestedStacks: Stack[]) { + this.shouldDeployAppRegCondition = new CfnCondition(hubStack, 'ShouldDeployAppReg', { + expression: Fn.conditionNot(Fn.conditionEquals(Aws.PARTITION, 'aws-cn')), + }); + const application = this.createAppRegistry(hubStack); // Do not create resource share // Do not associated spoke stacks, we must allow different regions + setCondition(application, this.shouldDeployAppRegCondition); + Aspects.of(application).add(new ConditionAspect(this.shouldDeployAppRegCondition, CfnAttributeGroupAssociation)); + let suffix = 1; nestedStacks.forEach((nestedStack) => { - const association = new CfnResourceAssociation(application, `ResourceAssociation${suffix++}`, { + const association = new CfnResourceAssociation(application, `ResourceAssociation${suffix}`, { application: application.applicationId, resource: nestedStack.stackId, resourceType: 'CFN_STACK', }); - // If the nested stack is conditional, the resource association must also be so on the same condition association.cfnOptions.condition = nestedStack.nestedStackResource?.cfnOptions.condition; if (nestedStack.nestedStackResource) { @@ -56,7 +69,11 @@ export class AppRegister { } else { throw new Error('No nested stack resource'); } + suffix++; }); + + Aspects.of(hubStack).add(new ConditionAspect(this.shouldDeployAppRegCondition, CfnResourceAssociation)); + Aspects.of(hubStack).add(new ConditionAspect(this.shouldDeployAppRegCondition, CfnAttributeGroup)); } private createAppRegistry(stack: Stack): Application { diff --git a/source/lib/cdk-helper/add-cfn-nag-suppression.test.ts b/source/lib/cdk-helper/add-cfn-nag-suppression.test.ts index 24c52336..c6676b18 100644 --- a/source/lib/cdk-helper/add-cfn-nag-suppression.test.ts +++ b/source/lib/cdk-helper/add-cfn-nag-suppression.test.ts @@ -11,7 +11,7 @@ describe('add cfn-nag suppression', function () { const suppression: CfnNagSuppression = { id: 'my id', reason: 'my reason' }; 
addCfnNagSuppression(bucket, suppression); expect((bucket.node.defaultChild as CfnResource).cfnOptions.metadata?.cfn_nag?.rules_to_suppress).toStrictEqual( - expect.arrayContaining([suppression]) + expect.arrayContaining([suppression]), ); }); @@ -25,7 +25,7 @@ describe('add cfn-nag suppression', function () { }; addCfnNagSuppression(bucket, secondSuppression); expect((bucket.node.defaultChild as CfnResource).cfnOptions.metadata?.cfn_nag?.rules_to_suppress).toStrictEqual( - expect.arrayContaining([firstSuppression, secondSuppression]) + expect.arrayContaining([firstSuppression, secondSuppression]), ); }); }); diff --git a/source/lib/cdk-helper/condition-aspect.ts b/source/lib/cdk-helper/condition-aspect.ts new file mode 100644 index 00000000..bf17c8eb --- /dev/null +++ b/source/lib/cdk-helper/condition-aspect.ts @@ -0,0 +1,34 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { CfnCondition, CfnResource, Fn, IAspect } from 'aws-cdk-lib'; +import { IConstruct } from 'constructs'; + +export class ConditionAspect CfnResource> implements IAspect { + constructor( + private condition: CfnCondition, + private resourceType?: T, + ) {} + + visit(node: IConstruct): void { + if (node instanceof (this.resourceType ?? CfnResource)) { + if (node.cfnOptions.condition) { + const parentStack = node.cfnOptions.condition?.stack; + const existingConditionName = parentStack.resolve(node.cfnOptions.condition).Condition; + const newConditionName = parentStack.resolve(this.condition).Condition; + if (existingConditionName !== newConditionName) { + const combinedName = `${existingConditionName}And${newConditionName}`; + + const compoundCondition = + (parentStack.node.tryFindChild(combinedName) as CfnCondition) ?? 
+ new CfnCondition(parentStack, combinedName, { + expression: Fn.conditionAnd(this.condition, node.cfnOptions.condition), + }); + + node.cfnOptions.condition = compoundCondition; + } + } else { + node.cfnOptions.condition = this.condition; + } + } + } +} diff --git a/source/lib/cdk-helper/nested-stack.ts b/source/lib/cdk-helper/nested-stack.ts index f6222c31..0c4f68d3 100644 --- a/source/lib/cdk-helper/nested-stack.ts +++ b/source/lib/cdk-helper/nested-stack.ts @@ -68,7 +68,7 @@ export class SerializedNestedStackFactory extends Construct { this.conditionalNestedStacks.forEach(function (previousStack: ConditionalNestedStack) { dummyResource.addMetadata( `${previousStack.stack.logicalId}Ready`, - Fn.conditionIf(previousStack.condition.logicalId, Fn.ref(previousStack.stack.logicalId), '') + Fn.conditionIf(previousStack.condition.logicalId, Fn.ref(previousStack.stack.logicalId), ''), ); }); cfnStack.addDependency(dummyResource); diff --git a/source/lib/cloudwatch_metrics.ts b/source/lib/cloudwatch_metrics.ts new file mode 100644 index 00000000..ce85709e --- /dev/null +++ b/source/lib/cloudwatch_metrics.ts @@ -0,0 +1,326 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +import { CfnCondition, CfnParameter, Duration, Fn } from 'aws-cdk-lib'; +import { StringParameter } from 'aws-cdk-lib/aws-ssm'; +import { Construct } from 'constructs'; +import setCondition from './cdk-helper/set-condition'; +import { + ComparisonOperator, + Dashboard, + GraphWidget, + MathExpression, + Metric, + TextWidget, + TreatMissingData, + Unit, +} from 'aws-cdk-lib/aws-cloudwatch'; +import { Topic } from 'aws-cdk-lib/aws-sns'; +import { Key } from 'aws-cdk-lib/aws-kms'; +import { SnsAction } from 'aws-cdk-lib/aws-cloudwatch-actions'; +import { ServicePrincipal } from 'aws-cdk-lib/aws-iam'; + +export interface CloudWatchMetricsProps { + solutionId: string; + schedulingQueueName: string; + orchStateMachineArn: string; + kmsKey: Key; +} + +export class CloudWatchMetrics { + private readonly parameters: CfnParameter[] = []; + private useCloudWatchMetrics: CfnParameter; + + constructor(scope: Construct, props: CloudWatchMetricsProps) { + const RESOURCE_PREFIX = props.solutionId.replace(/^DEV-/, ''); // prefix on every resource name + + props.kmsKey.grantEncryptDecrypt(new ServicePrincipal('cloudwatch.amazonaws.com')); + + /// CloudWatch Metrics + this.useCloudWatchMetrics = new CfnParameter(scope, 'UseCloudWatchMetrics', { + type: 'String', + description: + 'Enable collection of operational metrics and create a CloudWatch dashboard to monitor solution operations', + default: 'yes', + allowedValues: ['yes', 'no'], + }); + this.parameters.push(this.useCloudWatchMetrics); + + const isUsingCloudWatchMetrics = new CfnCondition(scope, 'isUsingCloudWatchMetrics', { + expression: Fn.conditionEquals(this.useCloudWatchMetrics, 'yes'), + }); + + const useCloudWatchMetricsAlarms = new CfnParameter(scope, 'UseCloudWatchMetricsAlarms', { + type: 'String', + description: 'Create CloudWatch Alarms for gathered metrics', + default: 'yes', + allowedValues: ['yes', 'no'], + }); + this.parameters.push(useCloudWatchMetricsAlarms); + + const 
isUsingCloudWatchMetricsAlarms = new CfnCondition(scope, 'isUsingCloudWatchMetricsAlarms', { + expression: Fn.conditionAnd(isUsingCloudWatchMetrics, Fn.conditionEquals(useCloudWatchMetricsAlarms, 'yes')), + }); + + const stateMachineExecutionsAlarmThreshold = new CfnParameter(scope, 'StateMachineExecutionsAlarmThreshold', { + type: 'Number', + description: 'Number of executions in one period to trigger the state machine executions alarm', + default: 1000, + }); + this.parameters.push(stateMachineExecutionsAlarmThreshold); + + const sendCloudwatchMetricsParameter = new StringParameter(scope, 'ASR_SendCloudWatchMetrics', { + description: 'Flag to enable or disable sending cloudwatch metrics.', + parameterName: '/Solutions/' + RESOURCE_PREFIX + '/sendCloudwatchMetrics', + stringValue: 'yes', + }); + setCondition(sendCloudwatchMetricsParameter, isUsingCloudWatchMetrics); + + const defaultDuration = Duration.days(1); + + const lambdaErrorMetric = new Metric({ + namespace: 'ASR', + metricName: 'RemediationOutcome', + statistic: 'Sum', + period: defaultDuration, + dimensionsMap: { Outcome: 'LAMBDAERROR' }, + label: 'LAMBDAERROR', + }); + + const remediationNotActiveErrorMetric = new Metric({ + namespace: 'ASR', + metricName: 'RemediationOutcome', + statistic: 'Sum', + period: defaultDuration, + dimensionsMap: { Outcome: 'REMEDIATIONNOTTACTIVE' }, + label: 'REMEDIATIONNOTTACTIVE', + }); + + const noRemediationErrorMetric = new Metric({ + namespace: 'ASR', + metricName: 'RemediationOutcome', + statistic: 'Sum', + period: defaultDuration, + dimensionsMap: { Outcome: 'NOREMEDIATION' }, + label: 'NOREMEDIATION', + }); + + const standardNotEnabledErrorMetric = new Metric({ + namespace: 'ASR', + metricName: 'RemediationOutcome', + statistic: 'Sum', + period: defaultDuration, + dimensionsMap: { Outcome: 'STANDARDNOTENABLED' }, + label: 'STANDARDNOTENABLED', + }); + + const successMetric = new Metric({ + namespace: 'ASR', + metricName: 'RemediationOutcome', + statistic: 'Sum', + 
period: defaultDuration, + dimensionsMap: { Outcome: 'SUCCESS' }, + label: 'SUCCESS', + }); + + const waitTimeMetric = new Metric({ + namespace: 'ASR', + metricName: 'RemediationSchedulingDelay', + statistic: 'Maximum', + unit: Unit.SECONDS, + period: defaultDuration, + label: 'Delay', + }); + + const failedExpression = new MathExpression({ + label: 'FAILURE', + period: defaultDuration, + expression: 'SUM([m1+m2+m3+m4])', + usingMetrics: { + ['m1']: lambdaErrorMetric, + ['m2']: remediationNotActiveErrorMetric, + ['m3']: noRemediationErrorMetric, + ['m4']: standardNotEnabledErrorMetric, + }, + }); + + const failedAssumeRoleMetric = new Metric({ + namespace: 'ASR', + metricName: 'AssumeRoleFailure', + statistic: 'Sum', + period: defaultDuration, + label: 'Runbook Assume Role Failures', + }); + + const queueLengthMetric = new Metric({ + namespace: 'AWS/SQS', + metricName: 'ApproximateNumberOfMessagesVisible', + statistic: 'Maximum', + period: defaultDuration, + label: 'Queue Length', + dimensionsMap: { + QueueName: props.schedulingQueueName, + }, + }); + + const stateMachineExecutionsMetric = new Metric({ + namespace: 'AWS/States', + metricName: 'ExecutionsStarted', + statistic: 'Sum', + period: defaultDuration, + label: 'Remediations started', + dimensionsMap: { + StateMachineArn: props.orchStateMachineArn, + }, + }); + + /// CloudWatch Alarms + const snsAlarmTopic = new Topic(scope, 'ASR-Alarm-Topic', { + displayName: 'ASR Alarm Topic (' + RESOURCE_PREFIX + ')', + topicName: RESOURCE_PREFIX + '-ASR_Alarm_Topic', + masterKey: props.kmsKey, + }); + setCondition(snsAlarmTopic, isUsingCloudWatchMetricsAlarms); + + const noRemediationErrorAlarm = noRemediationErrorMetric.createAlarm(scope, 'NoRemediationErrorAlarm', { + alarmName: 'ASR-NoRemediation', + evaluationPeriods: 1, + threshold: 1, + comparisonOperator: ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, + alarmDescription: + 'Remediation failed with NOREMEDIATION result. 
This indicates a remediation was attempted for an unsupported remediation', + treatMissingData: TreatMissingData.NOT_BREACHING, + datapointsToAlarm: 1, + actionsEnabled: true, + }); + setCondition(noRemediationErrorAlarm, isUsingCloudWatchMetricsAlarms); + noRemediationErrorAlarm.addAlarmAction(new SnsAction(snsAlarmTopic)); + + const failedAssumeRoleAlarm = failedAssumeRoleMetric.createAlarm(scope, 'FailedAssumeRoleAlarm', { + alarmName: 'ASR-RunbookAssumeRoleFailure', + evaluationPeriods: 1, + threshold: 1, + comparisonOperator: ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, + alarmDescription: + 'ASR Runbook Failed to assume role in an account. This indicates that a remediation was attempted in an account that does not have ASR deployed.', + treatMissingData: TreatMissingData.NOT_BREACHING, + datapointsToAlarm: 1, + actionsEnabled: true, + }); + setCondition(failedAssumeRoleAlarm, isUsingCloudWatchMetricsAlarms); + failedAssumeRoleAlarm.addAlarmAction(new SnsAction(snsAlarmTopic)); + + const stateMachineExecutionsAlarm = stateMachineExecutionsMetric.createAlarm(scope, 'StateMachineExecutions', { + alarmName: 'ASR-StateMachineExecutions', + evaluationPeriods: 1, + threshold: stateMachineExecutionsAlarmThreshold.valueAsNumber, + comparisonOperator: ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, + alarmDescription: 'Number of executed remediations is higher than normal. 
Check other metrics.', + treatMissingData: TreatMissingData.NOT_BREACHING, + datapointsToAlarm: 1, + }); + + setCondition(stateMachineExecutionsAlarm, isUsingCloudWatchMetricsAlarms); + stateMachineExecutionsAlarm.addAlarmAction(new SnsAction(snsAlarmTopic)); + + /// CloudWatch Dashboard + const remediationDashboard = new Dashboard(scope, 'RemediationDashboard', { + dashboardName: 'ASR-Remediation-Metrics-Dashboard', + defaultInterval: Duration.days(7), + }); + setCondition(remediationDashboard, isUsingCloudWatchMetrics); + + remediationDashboard.addWidgets( + new GraphWidget({ + title: 'State Machine Executions', + left: [stateMachineExecutionsMetric], + leftAnnotations: [stateMachineExecutionsAlarm.toAnnotation()], + }), + new GraphWidget({ + title: 'Remediation Outcomes', + left: [failedExpression, successMetric], + leftYAxis: { + showUnits: false, + }, + }), + new GraphWidget({ + title: 'Remediation Failures by Type', + left: [ + lambdaErrorMetric, + remediationNotActiveErrorMetric, + noRemediationErrorMetric, + standardNotEnabledErrorMetric, + ], + leftAnnotations: [noRemediationErrorAlarm.toAnnotation()], + leftYAxis: { + showUnits: false, + }, + }), + new TextWidget({ + markdown: ` +## Remediation Failures by Type +This widget displays the frequency of different remediation outcomes. + +If there is an increase in \`NOREMEDIATION\` results, this indicates that remediations are being attempted for remediations not currently included in ASR. You should verify that this is not caused by a modified automatic remediation rule. +`, + height: 6, + }), + ); + + remediationDashboard.addWidgets( + new GraphWidget({ + title: 'Remediation Scheduling Queue Length', + left: [queueLengthMetric], + }), + new GraphWidget({ + title: 'Maximum Remediation Delay', + left: [waitTimeMetric], + }), + new TextWidget({ + markdown: ` +## Remediation Scheduling Widgets +These widgets are related to scheduling of remediations. 
+ +Triggered remediations are inserted into a queue and a scheduling Lambda picks them up to schedule the remediation execution. + +The queue length represents the maximum number of triggered remediations that were waiting to be scheduled during that period. + +The maximum delay is how far out, in seconds, that the scheduling Lambda has scheduled a remediation for execution. +`, + height: 6, + }), + ); + + remediationDashboard.addWidgets( + new GraphWidget({ + title: 'Runbook Assume Role Failures', + left: [failedAssumeRoleMetric], + leftAnnotations: [failedAssumeRoleAlarm.toAnnotation()], + leftYAxis: { + showUnits: false, + }, + }), + new TextWidget({ + markdown: ` +## Runbook Assume Role Failures +This widget displays the frequency of the remediation lambda failing to assume the role necessary to remediate on a different account. + +This may indicate that ASR is attempting to remediate on a spoke account that does not have ASR installed. +`, + height: 6, + }), + ); + } + + public getParameterIds(): string[] { + return this.parameters.map((p) => p.logicalId); + } + + public getParameterIdsAndLabels() { + return this.parameters.reduce((a, p) => ({ ...a, [p.logicalId]: { default: p.logicalId } }), {}); + } + + public getCloudWatchMetricsParameterValue(): string { + return this.useCloudWatchMetrics.valueAsString; + } +} diff --git a/source/lib/common-orchestrator-construct.ts b/source/lib/common-orchestrator-construct.ts index 93ed0baf..13128d4d 100644 --- a/source/lib/common-orchestrator-construct.ts +++ b/source/lib/common-orchestrator-construct.ts @@ -4,11 +4,12 @@ import { Stack, Duration, RemovalPolicy, CfnParameter, CfnResource, Fn, NestedSt import { PolicyDocument, PolicyStatement, Role, Effect, ServicePrincipal, CfnRole } from 'aws-cdk-lib/aws-iam'; import * as lambda from 'aws-cdk-lib/aws-lambda'; import * as sfn from 'aws-cdk-lib/aws-stepfunctions'; -import { LambdaInvoke } from 'aws-cdk-lib/aws-stepfunctions-tasks'; +import { LambdaInvoke, SqsSendMessage 
} from 'aws-cdk-lib/aws-stepfunctions-tasks'; import { StringParameter } from 'aws-cdk-lib/aws-ssm'; import { Construct } from 'constructs'; import * as cdk_nag from 'cdk-nag'; import { Timeout } from 'aws-cdk-lib/aws-stepfunctions'; +import { IQueue } from 'aws-cdk-lib/aws-sqs'; export interface ConstructProps { roleArn: string; @@ -22,6 +23,7 @@ export interface ConstructProps { solutionVersion: string; orchLogGroup: string; kmsKeyParm: StringParameter; // to force dependency + sqsQueue: IQueue; } export class OrchestratorConstruct extends Construct { @@ -64,7 +66,7 @@ export class OrchestratorConstruct extends Construct { 'getRequirementFunc', { functionArn: props.getApprovalRequirementLambda, - } + }, ); const orchestratorFailed = new sfn.Pass(this, 'Orchestrator Failed', { @@ -112,6 +114,25 @@ export class OrchestratorConstruct extends Construct { }); getApprovalRequirement.addCatch(orchestratorFailed); + const remediationWait = new sfn.Wait(this, 'Remediation Wait', { + comment: 'Waiting for remediation', + time: sfn.WaitTime.timestampPath('$.PlannedTimestamp'), + }); + + const sendTaskToken = new SqsSendMessage(this, 'Send Task Token', { + comment: 'Send Task Token to SQS Queue for Remediation Scheduling', + integrationPattern: sfn.IntegrationPattern.WAIT_FOR_TASK_TOKEN, + queue: props.sqsQueue, + messageBody: sfn.TaskInput.fromObject({ + RemediationDetails: sfn.JsonPath.stringAt('$'), + TaskToken: sfn.JsonPath.taskToken, + AccountId: sfn.JsonPath.stringAt('$.AutomationDocument.AccountId'), + ResourceRegion: sfn.JsonPath.stringAt('$.AutomationDocument.ResourceRegion'), + executionId: sfn.JsonPath.stringAt('$$.Execution.Id'), + }), + }); + sendTaskToken.addCatch(orchestratorFailed); + const remediateFinding = new LambdaInvoke(this, 'Execute Remediation', { comment: 'Execute the SSM Automation Document in the target account', lambdaFunction: execRemediationFunc, @@ -334,10 +355,10 @@ export class OrchestratorConstruct extends Construct { 
sfn.Condition.stringEquals('$.EventType', 'Security Hub Findings - Custom Action'), sfn.Condition.and( sfn.Condition.stringEquals('$.Finding.Workflow.Status', 'NEW'), - sfn.Condition.stringEquals('$.EventType', 'Security Hub Findings - Imported') - ) + sfn.Condition.stringEquals('$.EventType', 'Security Hub Findings - Imported'), + ), ), - getApprovalRequirement + getApprovalRequirement, ); checkWorkflowNew.otherwise(docNotNew); @@ -348,7 +369,7 @@ export class OrchestratorConstruct extends Construct { getApprovalRequirement.next(getDocState); - checkDocState.when(sfn.Condition.stringEquals('$.AutomationDocument.DocState', 'ACTIVE'), remediateFinding); + checkDocState.when(sfn.Condition.stringEquals('$.AutomationDocument.DocState', 'ACTIVE'), sendTaskToken); checkDocState.when(sfn.Condition.stringEquals('$.AutomationDocument.DocState', 'NOTACTIVE'), docStateNotActive); checkDocState.when(sfn.Condition.stringEquals('$.AutomationDocument.DocState', 'NOTENABLED'), standardNotEnabled); checkDocState.when(sfn.Condition.stringEquals('$.AutomationDocument.DocState', 'NOTFOUND'), controlNoRemediation); @@ -365,6 +386,9 @@ export class OrchestratorConstruct extends Construct { // Execute the remediation // remediateFinding.next(execMonitor) + sendTaskToken.next(remediationWait); + + remediationWait.next(remediateFinding); // Send a notification remediateFinding.next(remediationQueued); @@ -407,7 +431,7 @@ export class OrchestratorConstruct extends Construct { ], effect: Effect.ALLOW, resources: ['*'], - }) + }), ); orchestratorPolicy.addStatements( new PolicyStatement({ @@ -420,14 +444,24 @@ export class OrchestratorConstruct extends Construct { `arn:${stack.partition}:lambda:${stack.region}:${stack.account}:function:${notifyFunc.functionName}`, `arn:${stack.partition}:lambda:${stack.region}:${stack.account}:function:${getApprovalRequirementFunc.functionName}`, ], - }) + }), ); orchestratorPolicy.addStatements( new PolicyStatement({ actions: ['kms:Encrypt', 'kms:Decrypt', 
'kms:GenerateDataKey'], effect: Effect.ALLOW, - resources: [`arn:${stack.partition}:kms:${stack.region}:${stack.account}:alias/${RESOURCE_PREFIX}-SHARR-Key`], - }) + resources: [ + `arn:${stack.partition}:kms:${stack.region}:${stack.account}:alias/${RESOURCE_PREFIX}-SHARR-Key`, + props.kmsKeyParm.stringValue, + ], + }), + ); + orchestratorPolicy.addStatements( + new PolicyStatement({ + actions: ['sqs:SendMessage'], + effect: Effect.ALLOW, + resources: [props.sqsQueue.queueArn], + }), ); const principal = new ServicePrincipal(`states.amazonaws.com`); @@ -438,7 +472,6 @@ export class OrchestratorConstruct extends Construct { }, }); orchestratorRole.applyRemovalPolicy(RemovalPolicy.RETAIN); - { const childToMod = orchestratorRole.node.defaultChild as CfnRole; childToMod.cfnOptions.metadata = { @@ -463,9 +496,9 @@ export class OrchestratorConstruct extends Construct { ]); const orchestratorStateMachine = new sfn.StateMachine(this, 'StateMachine', { - definition: extractFindings, + definitionBody: sfn.DefinitionBody.fromChainable(extractFindings), stateMachineName: `${RESOURCE_PREFIX}-SHARR-Orchestrator`, - timeout: Duration.minutes(15), + timeout: Duration.minutes(90), role: orchestratorRole, }); @@ -531,7 +564,7 @@ export class OrchestratorConstruct extends Construct { Fn.findInMap('SourceCode', 'General', 'S3Bucket') + '-reference.s3.amazonaws.com/' + Fn.findInMap('SourceCode', 'General', 'KeyPrefix') + - '/aws-sharr-orchestrator-log.template' + '/aws-sharr-orchestrator-log.template', ); return logStack; } diff --git a/source/lib/member/bucket-encryption.ts b/source/lib/member/bucket-encryption.ts index 6d976310..7c56f17f 100644 --- a/source/lib/member/bucket-encryption.ts +++ b/source/lib/member/bucket-encryption.ts @@ -15,7 +15,7 @@ export class MemberBucketEncryption extends Construct { new StringParameter(scope, 'SSMParameterForS3.4EncryptionKeyAlias', { description: - 'Parameter to store encryption key alias for the PCI.S3.4/AFSBP.S3.4, replace the default value 
with the KMS Key Alias, other wise the remediation will enable the default AES256 encryption for the bucket.', + 'Parameter to store encryption key alias for the PCI.S3.4/FSBP.S3.4, replace the default value with the KMS Key Alias, other wise the remediation will enable the default AES256 encryption for the bucket.', parameterName: `/Solutions/${props.solutionId}/afsbp/1.0.0/S3.4/KmsKeyAlias`, stringValue: 'default-s3-encryption', }); diff --git a/source/lib/member/redshift-audit-logging.ts b/source/lib/member/redshift-audit-logging.ts index 03992bcf..84a3c990 100644 --- a/source/lib/member/redshift-audit-logging.ts +++ b/source/lib/member/redshift-audit-logging.ts @@ -30,7 +30,9 @@ export class RedshiftAuditLogging extends Construct { expression: Fn.conditionEquals(templateParam.valueAsString, ChoiceParam.Yes), }); - const bucket = new Bucket(scope, 'S3BucketForRedShiftAuditLogging', { //NOSONAR The policy attached to this bucket enforces SSL. + const bucket = new Bucket(scope, 'S3BucketForRedShiftAuditLogging', { + //NOSONAR The policy attached to this bucket enforces SSL. 
+ versioned: true, encryption: BucketEncryption.S3_MANAGED, publicReadAccess: false, blockPublicAccess: BlockPublicAccess.BLOCK_ALL, @@ -67,7 +69,7 @@ export class RedshiftAuditLogging extends Construct { principals: [new StarPrincipal()], resources: [bucket.bucketArn, bucket.arnForObjects('*')], conditions: { Bool: { ['aws:SecureTransport']: 'false' } }, - }) + }), ); setCondition(bucketPolicy, condition); bucketPolicy.node.addDependency(bucket.node.defaultChild as CfnBucket); @@ -78,7 +80,7 @@ export class RedshiftAuditLogging extends Construct { const ssmParam = new StringParameter(scope, 'SSMParameterForS3BucketNameForREDSHIFT4', { description: - 'Parameter to store the S3 bucket name for the remediation AFSBP.REDSHIFT.4, the default value is bucket-name which has to be updated by the user before using the remediation.', + 'Parameter to store the S3 bucket name for the remediation FSBP.REDSHIFT.4, the default value is bucket-name which has to be updated by the user before using the remediation.', parameterName: `/Solutions/${props.solutionId}/afsbp/1.0.0/REDSHIFT.4/S3BucketNameForAuditLogging`, stringValue: bucket.bucketName, }); diff --git a/source/lib/member/remediation-key.ts b/source/lib/member/remediation-key.ts index 2247d688..3f5e2071 100644 --- a/source/lib/member/remediation-key.ts +++ b/source/lib/member/remediation-key.ts @@ -31,7 +31,7 @@ export class MemberRemediationKey extends Construct { 'kms:ReEncryptFrom', 'kms:ReEncryptTo', 'kms:DescribeKey', - 'kms:DescribeCustomKeyStores' + 'kms:DescribeCustomKeyStores', ); kmsPerms.effect = Effect.ALLOW; kmsPerms.addResources('*'); // Only the key the policydocument is attached to diff --git a/source/lib/orchestrator-log-stack.ts b/source/lib/orchestrator-log-stack.ts index b75a4cb3..b03c12ce 100644 --- a/source/lib/orchestrator-log-stack.ts +++ b/source/lib/orchestrator-log-stack.ts @@ -39,7 +39,7 @@ export class OrchLogStack extends cdk.Stack { const orchestratorLogGroupEncrypted: LogGroup = new 
LogGroup(this, 'Orchestrator-Logs-Encrypted', { logGroupName: props.logGroupName, removalPolicy: cdk.RemovalPolicy.RETAIN, - retention: RetentionDays.ONE_YEAR, + retention: RetentionDays.TEN_YEARS, encryptionKey: kmsKey, }); @@ -49,7 +49,7 @@ export class OrchLogStack extends cdk.Stack { const orchestratorLogGroupNOTEncrypted: LogGroup = new LogGroup(this, 'Orchestrator-Logs', { logGroupName: props.logGroupName, removalPolicy: cdk.RemovalPolicy.RETAIN, - retention: RetentionDays.ONE_YEAR, + retention: RetentionDays.TEN_YEARS, }); /******************* @@ -70,7 +70,7 @@ export class OrchLogStack extends cdk.Stack { childToMod.cfnOptions.condition = new cdk.CfnCondition(this, 'Unencrypted Log Group', { expression: cdk.Fn.conditionAnd( cdk.Fn.conditionNot(isNotGovCloud), - cdk.Fn.conditionEquals(reuseOrchLogGroup.valueAsString, 'no') + cdk.Fn.conditionEquals(reuseOrchLogGroup.valueAsString, 'no'), ), }); childToMod.cfnOptions.metadata = { diff --git a/source/lib/orchestrator_roles-construct.ts b/source/lib/orchestrator_roles-construct.ts index fe53feaf..070a1ab7 100644 --- a/source/lib/orchestrator_roles-construct.ts +++ b/source/lib/orchestrator_roles-construct.ts @@ -67,7 +67,7 @@ export class OrchestratorMemberRole extends Construct { resource: 'automation-execution', resourceName: '*', arnFormat: ArnFormat.SLASH_RESOURCE_NAME, - }) + }), ); memberPolicy.addStatements(ssmRWPerms); @@ -92,7 +92,7 @@ export class OrchestratorMemberRole extends Construct { actions: ['config:DescribeConfigRules'], resources: ['*'], effect: Effect.ALLOW, - }) + }), ); const sechubPerms = new PolicyStatement(); @@ -109,7 +109,7 @@ export class OrchestratorMemberRole extends Construct { principalPolicyStatement.effect = Effect.ALLOW; const roleprincipal = new ArnPrincipal( - `arn:${stack.partition}:iam::${props.adminAccountId}:role/${props.adminRoleName}` + `arn:${stack.partition}:iam::${props.adminAccountId}:role/${props.adminRoleName}`, ); const principals = new 
CompositePrincipal(roleprincipal); diff --git a/source/lib/remediation_runbook-stack.ts b/source/lib/remediation_runbook-stack.ts index cc026669..eab4922f 100644 --- a/source/lib/remediation_runbook-stack.ts +++ b/source/lib/remediation_runbook-stack.ts @@ -22,7 +22,6 @@ import { Rds6EnhancedMonitoringRole } from './rds6-remediation-resources'; import { RunbookFactory } from './runbook_factory'; import { SNS2DeliveryStatusLoggingRole } from './sns2-remediation-resources'; import { SsmRole } from './ssmplaybook'; -import { StringParameter } from 'aws-cdk-lib/aws-ssm'; import { Aspects, CfnParameter } from 'aws-cdk-lib'; import { WaitProvider } from './wait-provider'; import SsmDocRateLimit from './ssm-doc-rate-limit'; @@ -74,7 +73,7 @@ export class RemediationRunbookStack extends cdk.Stack { const waitProvider = WaitProvider.fromServiceToken( this, 'WaitProvider', - waitProviderServiceTokenParam.valueAsString + waitProviderServiceTokenParam.valueAsString, ); Aspects.of(this).add(new SsmDocRateLimit(waitProvider)); @@ -94,7 +93,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'CreateCloudTrailMultiRegionTrail'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const cloudtrailPerms = new PolicyStatement(); cloudtrailPerms.addActions('cloudtrail:CreateTrail', 'cloudtrail:UpdateTrail', 'cloudtrail:StartLogging'); cloudtrailPerms.effect = Effect.ALLOW; @@ -109,7 +108,7 @@ export class RemediationRunbookStack extends cdk.Stack { 's3:PutBucketLogging', 's3:PutBucketAcl', 's3:PutBucketPolicy', - 's3:PutBucketOwnershipControls' + 's3:PutBucketOwnershipControls', ); s3Perms.effect = Effect.ALLOW; s3Perms.addResources(`arn:${this.partition}:s3:::so0111-*`); @@ -155,7 +154,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 
'CreateLogMetricFilterAndAlarm'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy = new PolicyStatement(); remediationPolicy.addActions('logs:PutMetricFilter', 'cloudwatch:PutMetricAlarm'); @@ -195,7 +194,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableAutoScalingGroupELBHealthCheck'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const asPerms = new PolicyStatement(); asPerms.addActions('autoscaling:UpdateAutoScalingGroup', 'autoscaling:DescribeAutoScalingGroups'); asPerms.effect = Effect.ALLOW; @@ -237,7 +236,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableAWSConfig'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); { const iamPerms = new PolicyStatement(); @@ -245,7 +244,7 @@ export class RemediationRunbookStack extends cdk.Stack { iamPerms.effect = Effect.ALLOW; iamPerms.addResources( `arn:${this.partition}:iam::${this.account}:role/aws-service-role/config.amazonaws.com/AWSServiceRoleForConfig`, - `arn:${this.partition}:iam::${this.account}:role/SO0111-CreateAccessLoggingBucket` + `arn:${this.partition}:iam::${this.account}:role/SO0111-CreateAccessLoggingBucket`, ); inlinePolicy.addStatements(iamPerms); } @@ -261,7 +260,7 @@ export class RemediationRunbookStack extends cdk.Stack { ssmPerms.addActions('ssm:StartAutomationExecution'); ssmPerms.effect = Effect.ALLOW; ssmPerms.addResources( - `arn:${this.partition}:ssm:*:${this.account}:automation-definition/ASR-CreateAccessLoggingBucket:*` + 
`arn:${this.partition}:ssm:*:${this.account}:automation-definition/ASR-CreateAccessLoggingBucket:*`, ); inlinePolicy.addStatements(ssmPerms); } @@ -272,7 +271,7 @@ export class RemediationRunbookStack extends cdk.Stack { 'config:PutConfigurationRecorder', 'config:PutDeliveryChannel', 'config:DescribeConfigurationRecorders', - 'config:StartConfigurationRecorder' + 'config:StartConfigurationRecorder', ); configPerms.effect = Effect.ALLOW; configPerms.addResources(`*`); @@ -286,7 +285,7 @@ export class RemediationRunbookStack extends cdk.Stack { 's3:PutBucketPublicAccessBlock', 's3:PutBucketLogging', 's3:PutBucketAcl', - 's3:PutBucketPolicy' + 's3:PutBucketPolicy', ); s3Perms.effect = Effect.ALLOW; s3Perms.addResources(`arn:${this.partition}:s3:::so0111-*`); @@ -327,7 +326,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableCloudTrailToCloudWatchLogging'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); // Role for CT->CW logging const ctcw_remediation_policy_statement_1 = new PolicyStatement(); @@ -426,7 +425,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableCloudTrailEncryption'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const cloudtrailPerms = new PolicyStatement(); cloudtrailPerms.addActions('cloudtrail:UpdateTrail'); @@ -475,13 +474,13 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableDefaultEncryptionS3'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); inlinePolicy.addStatements( new 
PolicyStatement({ actions: ['s3:PutEncryptionConfiguration', 'kms:GenerateDataKey'], resources: ['*'], effect: Effect.ALLOW, - }) + }), ); new SsmRole(props.roleStack, 'RemediationRole ' + remediationName, { solutionId: props.solutionId, @@ -523,7 +522,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableVPCFlowLogs'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); { const remediationPerms = new PolicyStatement(); @@ -531,7 +530,7 @@ export class RemediationRunbookStack extends cdk.Stack { remediationPerms.effect = Effect.ALLOW; remediationPerms.addResources( `arn:${this.partition}:ec2:*:${this.account}:vpc/*`, - `arn:${this.partition}:ec2:*:${this.account}:vpc-flow-log/*` + `arn:${this.partition}:ec2:*:${this.account}:vpc-flow-log/*`, ); inlinePolicy.addStatements(remediationPerms); } @@ -540,7 +539,7 @@ export class RemediationRunbookStack extends cdk.Stack { iamPerms.addActions('iam:PassRole'); iamPerms.effect = Effect.ALLOW; iamPerms.addResources( - `arn:${this.partition}:iam::${this.account}:role/${RESOURCE_PREFIX}-${remediationName}-remediationRole` + `arn:${this.partition}:iam::${this.account}:role/${RESOURCE_PREFIX}-${remediationName}-remediationRole`, ); inlinePolicy.addStatements(iamPerms); } @@ -549,7 +548,7 @@ export class RemediationRunbookStack extends cdk.Stack { ssmPerms.addActions('ssm:GetParameter'); ssmPerms.effect = Effect.ALLOW; ssmPerms.addResources( - `arn:${this.partition}:ssm:*:${this.account}:parameter/${RESOURCE_PREFIX}/CMK_REMEDIATION_ARN` + `arn:${this.partition}:ssm:*:${this.account}:parameter/${RESOURCE_PREFIX}/CMK_REMEDIATION_ARN`, ); inlinePolicy.addStatements(ssmPerms); } @@ -569,7 +568,7 @@ export class RemediationRunbookStack extends cdk.Stack { 'logs:CreateLogStream', 'logs:DescribeLogGroups', 'logs:DescribeLogStreams', - 
'logs:PutLogEvents' + 'logs:PutLogEvents', ); remediation_policy.addResources('*'); @@ -637,13 +636,13 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'CreateAccessLoggingBucket'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const s3Perms = new PolicyStatement(); s3Perms.addActions( 's3:CreateBucket', 's3:PutEncryptionConfiguration', 's3:PutBucketAcl', - 's3:PutBucketOwnershipControls' + 's3:PutBucketOwnershipControls', ); s3Perms.effect = Effect.ALLOW; s3Perms.addResources('*'); @@ -685,7 +684,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'MakeEBSSnapshotsPrivate'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const ec2Perms = new PolicyStatement(); ec2Perms.addActions('ec2:ModifySnapshotAttribute', 'ec2:DescribeSnapshots'); ec2Perms.effect = Effect.ALLOW; @@ -730,7 +729,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'MakeRDSSnapshotPrivate'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPerms = new PolicyStatement(); remediationPerms.addActions('rds:ModifyDBSnapshotAttribute', 'rds:ModifyDBClusterSnapshotAttribute'); remediationPerms.effect = Effect.ALLOW; @@ -775,7 +774,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'RemoveLambdaPublicAccess'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, 
`ASR-Remediation-Policy-${remediationName}`); const lambdaPerms = new PolicyStatement(); lambdaPerms.addActions('lambda:GetPolicy', 'lambda:RemovePermission'); @@ -821,13 +820,13 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'RevokeUnrotatedKeys'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy = new PolicyStatement(); remediationPolicy.addActions( 'iam:UpdateAccessKey', 'iam:ListAccessKeys', 'iam:GetAccessKeyLastUsed', - 'iam:GetUser' + 'iam:GetUser', ); remediationPolicy.effect = Effect.ALLOW; remediationPolicy.addResources('arn:' + this.partition + ':iam::' + this.account + ':user/*'); @@ -874,7 +873,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'SetSSLBucketPolicy'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); { const remediationPerms = new PolicyStatement(); @@ -919,14 +918,14 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'ReplaceCodeBuildClearTextCredentials'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy = new PolicyStatement(); remediationPolicy.addActions( 'codeBuild:BatchGetProjects', 'codeBuild:UpdateProject', 'ssm:PutParameter', - 'iam:CreatePolicy' + 'iam:CreatePolicy', ); remediationPolicy.effect = Effect.ALLOW; remediationPolicy.addResources('*'); @@ -943,7 +942,7 @@ export class RemediationRunbookStack extends cdk.Stack { attachRolePolicyDeny.addActions('iam:AttachRolePolicy'); attachRolePolicyDeny.effect = Effect.DENY; 
attachRolePolicyDeny.addResources( - `arn:${this.partition}:iam::${this.account}:role/${remediationRoleNameBase}${remediationName}` + `arn:${this.partition}:iam::${this.account}:role/${remediationRoleNameBase}${remediationName}`, ); inlinePolicy.addStatements(attachRolePolicyDeny); @@ -981,7 +980,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'S3BlockDenylist'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy = new PolicyStatement(); remediationPolicy.addActions('s3:PutBucketPolicy', 's3:GetBucketPolicy'); @@ -1024,16 +1023,17 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EncryptRDSSnapshot'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy = new PolicyStatement(); remediationPolicy.addActions( + 'rds:AddTagsToResource', 'rds:CopyDBSnapshot', 'rds:CopyDBClusterSnapshot', 'rds:DescribeDBSnapshots', 'rds:DescribeDBClusterSnapshots', 'rds:DeleteDBSnapshot', - 'rds:DeleteDBClusterSnapshots' + 'rds:DeleteDBClusterSnapshots', ); remediationPolicy.effect = Effect.ALLOW; remediationPolicy.addResources('*'); @@ -1074,7 +1074,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'DisablePublicAccessToRedshiftCluster'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy = new PolicyStatement(); remediationPolicy.addActions('redshift:ModifyCluster', 'redshift:DescribeClusters'); @@ -1116,7 +1116,7 @@ export class RemediationRunbookStack extends cdk.Stack 
{ // { const remediationName = 'EnableRedshiftClusterAuditLogging'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy = new PolicyStatement(); remediationPolicy.addActions('redshift:DescribeLoggingStatus', 'redshift:EnableLogging'); @@ -1163,7 +1163,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableAutomaticVersionUpgradeOnRedshiftCluster'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy = new PolicyStatement(); remediationPolicy.addActions('redshift:ModifyCluster', 'redshift:DescribeClusters'); @@ -1206,7 +1206,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableAutomaticSnapshotsOnRedshiftCluster'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy = new PolicyStatement(); remediationPolicy.addActions('redshift:ModifyCluster', 'redshift:DescribeClusters'); @@ -1249,7 +1249,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'CreateIAMSupportRole'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const roleName = 'aws_incident_support_role'; const iamPerms = new PolicyStatement(); iamPerms.addActions('iam:GetRole', 'iam:CreateRole', 'iam:AttachRolePolicy', 'iam:TagRole'); @@ -1261,7 +1261,7 @@ export class RemediationRunbookStack extends cdk.Stack { 
denyAddPermsToSelf.addActions('iam:AttachRolePolicy'); denyAddPermsToSelf.effect = Effect.DENY; denyAddPermsToSelf.addResources( - `arn:${this.partition}:iam::${this.account}:role/${remediationRoleNameBase}${remediationName}` + `arn:${this.partition}:iam::${this.account}:role/${remediationRoleNameBase}${remediationName}`, ); inlinePolicy.addStatements(denyAddPermsToSelf); @@ -1306,7 +1306,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableEncryptionForSQSQueue'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy = new PolicyStatement(); remediationPolicy.addActions('sqs:GetQueueUrl', 'sqs:SetQueueAttributes', 'sqs:GetQueueAttributes'); @@ -1348,7 +1348,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'ConfigureSNSTopicForStack'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const cfnPerms = new PolicyStatement(); cfnPerms.addActions('cloudformation:DescribeStacks', 'cloudformation:UpdateStack'); cfnPerms.effect = Effect.ALLOW; @@ -1359,7 +1359,7 @@ export class RemediationRunbookStack extends cdk.Stack { snsPerms.addActions('sns:CreateTopic', 'sns:Publish'); snsPerms.effect = Effect.ALLOW; snsPerms.addResources( - `arn:${this.partition}:sns:${this.region}:${this.account}:SO0111-ASR-CloudFormationNotifications` + `arn:${this.partition}:sns:${this.region}:${this.account}:SO0111-ASR-CloudFormationNotifications`, ); inlinePolicy.addStatements(snsPerms); @@ -1410,7 +1410,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'ConfigureS3BucketLogging'; - const inlinePolicy = new Policy(props.roleStack, 
`SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const s3Perms = new PolicyStatement(); s3Perms.addActions('s3:PutBucketLogging', 's3:CreateBucket', 's3:PutEncryptionConfiguration'); @@ -1444,7 +1444,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'DisablePublicAccessForSecurityGroup'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPermsEc2 = new PolicyStatement(); remediationPermsEc2.addActions( @@ -1453,7 +1453,7 @@ export class RemediationRunbookStack extends cdk.Stack { 'ec2:UpdateSecurityGroupRuleDescriptionsEgress', 'ec2:UpdateSecurityGroupRuleDescriptionsIngress', 'ec2:RevokeSecurityGroupIngress', - 'ec2:RevokeSecurityGroupEgress' + 'ec2:RevokeSecurityGroupEgress', ); remediationPermsEc2.effect = Effect.ALLOW; remediationPermsEc2.addResources('*'); @@ -1490,7 +1490,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'ConfigureS3BucketPublicAccessBlock'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy = new PolicyStatement(); remediationPolicy.addActions('s3:PutBucketPublicAccessBlock', 's3:GetBucketPublicAccessBlock'); @@ -1532,7 +1532,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'ConfigureS3PublicAccessBlock'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy = new PolicyStatement(); 
remediationPolicy.addActions('s3:PutAccountPublicAccessBlock', 's3:GetAccountPublicAccessBlock'); @@ -1574,7 +1574,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableCloudTrailLogFileValidation'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy = new PolicyStatement(); remediationPolicy.addActions('cloudtrail:UpdateTrail', 'cloudtrail:GetTrail'); remediationPolicy.effect = Effect.ALLOW; @@ -1604,7 +1604,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableEbsEncryptionByDefault'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const ec2Perms = new PolicyStatement(); ec2Perms.addActions('ec2:EnableEBSEncryptionByDefault', 'ec2:GetEbsEncryptionByDefault'); ec2Perms.effect = Effect.ALLOW; @@ -1648,13 +1648,13 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableEnhancedMonitoringOnRDSInstance'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); { const iamPerms = new PolicyStatement(); iamPerms.addActions('iam:GetRole', 'iam:PassRole'); iamPerms.effect = Effect.ALLOW; iamPerms.addResources( - `arn:${this.partition}:iam::${this.account}:role/${RESOURCE_PREFIX}-RDSMonitoring-remediationRole` + `arn:${this.partition}:iam::${this.account}:role/${RESOURCE_PREFIX}-RDSMonitoring-remediationRole`, ); inlinePolicy.addStatements(iamPerms); } @@ -1707,7 +1707,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableKeyRotation'; - const inlinePolicy = 
new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPerms = new PolicyStatement(); remediationPerms.addActions('kms:EnableKeyRotation', 'kms:GetKeyRotationStatus'); remediationPerms.effect = Effect.ALLOW; @@ -1751,7 +1751,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableRDSClusterDeletionProtection'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const iamPerms = new PolicyStatement(); iamPerms.addActions('iam:GetRole'); @@ -1809,7 +1809,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableCopyTagsToSnapshotOnRDSCluster'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const iamPerms = new PolicyStatement(); iamPerms.addActions('iam:GetRole'); @@ -1867,7 +1867,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableRDSInstanceDeletionProtection'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const rdsPerms = new PolicyStatement(); rdsPerms.addActions('rds:DescribeDBInstances', 'rds:ModifyDBInstance'); @@ -1909,7 +1909,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableMultiAZOnRDSInstance'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const rdsPerms = new 
PolicyStatement(); rdsPerms.addActions('rds:DescribeDBInstances', 'rds:ModifyDBInstance'); @@ -1951,14 +1951,14 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'RemoveVPCDefaultSecurityGroupRules'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy1 = new PolicyStatement(); remediationPolicy1.addActions( 'ec2:UpdateSecurityGroupRuleDescriptionsEgress', 'ec2:UpdateSecurityGroupRuleDescriptionsIngress', 'ec2:RevokeSecurityGroupIngress', - 'ec2:RevokeSecurityGroupEgress' + 'ec2:RevokeSecurityGroupEgress', ); remediationPolicy1.effect = Effect.ALLOW; remediationPolicy1.addResources('arn:' + this.partition + ':ec2:*:' + this.account + ':security-group/*'); @@ -2008,7 +2008,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'RevokeUnusedIAMUserCredentials'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy = new PolicyStatement(); remediationPolicy.addActions( 'iam:UpdateAccessKey', @@ -2016,7 +2016,7 @@ export class RemediationRunbookStack extends cdk.Stack { 'iam:GetAccessKeyLastUsed', 'iam:GetUser', 'iam:GetLoginProfile', - 'iam:DeleteLoginProfile' + 'iam:DeleteLoginProfile', ); remediationPolicy.effect = Effect.ALLOW; remediationPolicy.addResources('arn:' + this.partition + ':iam::' + this.account + ':user/*'); @@ -2062,7 +2062,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'SetIAMPasswordPolicy'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const 
remediationPolicy = new PolicyStatement(); remediationPolicy.addActions( @@ -2070,7 +2070,7 @@ export class RemediationRunbookStack extends cdk.Stack { 'iam:GetAccountPasswordPolicy', 'ec2:UpdateSecurityGroupRuleDescriptionsIngress', 'ec2:RevokeSecurityGroupIngress', - 'ec2:RevokeSecurityGroupEgress' + 'ec2:RevokeSecurityGroupEgress', ); remediationPolicy.effect = Effect.ALLOW; remediationPolicy.addResources('*'); @@ -2110,7 +2110,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'DisablePublicAccessToRDSInstance'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy = new PolicyStatement(); remediationPolicy.addActions('rds:DescribeDBInstances', 'rds:ModifyDBInstance'); @@ -2152,14 +2152,14 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableMinorVersionUpgradeOnRDSDBInstance'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy = new PolicyStatement(); remediationPolicy.addActions( 'rds:DescribeDBInstances', 'rds:ModifyDBInstance', 'rds:DescribeDBClusters', - 'rds:ModifyDBCluster' + 'rds:ModifyDBCluster', ); remediationPolicy.effect = Effect.ALLOW; remediationPolicy.addResources('*'); @@ -2199,7 +2199,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableEncryptionForSNSTopic'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy = new PolicyStatement(); remediationPolicy.addActions('sns:SetTopicAttributes', 'sns:GetTopicAttributes'); @@ 
-2241,7 +2241,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'EnableDeliveryStatusLoggingForSNSTopic'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy = new PolicyStatement(); remediationPolicy.addActions('sns:SetTopicAttributes', 'sns:GetTopicAttributes'); @@ -2249,14 +2249,8 @@ export class RemediationRunbookStack extends cdk.Stack { remediationPolicy.addResources('*'); inlinePolicy.addStatements(remediationPolicy); - const sns2Role = new SNS2DeliveryStatusLoggingRole(props.roleStack, 'SNS2DeliveryStatusLoggingRole'); - - new StringParameter(props.roleStack, 'DeliveryStatusLoggingRoleParameter', { - description: `Parameter to store the IAM role required to run the SNS delivery status logging remediation. - This value is stored due to it being an IAM role that is retained after deletion. 
- Only delete if you have not run any related remediation to prevent logging outages.`, - parameterName: `/Solutions/${RESOURCE_PREFIX}/DeliveryStatusLoggingRole`, - stringValue: sns2Role.roleArn, + const sns2Role = new SNS2DeliveryStatusLoggingRole(props.roleStack, 'SNS2DeliveryStatusLoggingRole', { + roleName: `${RESOURCE_PREFIX}-SNS2DeliveryStatusLogging-remediationRole`, }); const iamPerms = new PolicyStatement(); @@ -2299,7 +2293,7 @@ export class RemediationRunbookStack extends cdk.Stack { // { const remediationName = 'DisablePublicIPAutoAssign'; - const inlinePolicy = new Policy(props.roleStack, `SHARR-Remediation-Policy-${remediationName}`); + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); const remediationPolicy = new PolicyStatement(); remediationPolicy.addActions('ec2:DescribeSubnets', 'ec2:ModifySubnetAttribute'); @@ -2335,5 +2329,664 @@ export class RemediationRunbookStack extends cdk.Stack { }, }; } + //----------------------- + // EnableIMDSV2OnInstance + // + { + const remediationName = 'EnableIMDSV2OnInstance'; + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); + + const remediationPolicy = new PolicyStatement(); + remediationPolicy.addActions('ec2:DescribeInstances', 'ec2:ModifyInstanceMetadataOptions'); + remediationPolicy.effect = Effect.ALLOW; + remediationPolicy.addResources('*'); + inlinePolicy.addStatements(remediationPolicy); + + new SsmRole(props.roleStack, 'RemediationRole ' + remediationName, { + solutionId: props.solutionId, + ssmDocName: remediationName, + remediationPolicy: inlinePolicy, + remediationRoleName: `${remediationRoleNameBase}${remediationName}`, + }); + + RunbookFactory.createRemediationRunbook(this, 'ASR ' + remediationName, { + ssmDocName: remediationName, + ssmDocPath: ssmdocs, + ssmDocFileName: `${remediationName}.yaml`, + scriptPath: `${ssmdocs}/scripts`, + solutionVersion: props.solutionVersion, + solutionDistBucket: 
props.solutionDistBucket, + solutionId: props.solutionId, + }); + const childToMod = inlinePolicy.node.findChild('Resource') as CfnPolicy; + childToMod.cfnOptions.metadata = { + cfn_nag: { + rules_to_suppress: [ + { + id: 'W12', + reason: 'Resource * is required for to allow remediation for any resource.', + }, + ], + }, + }; + } + + //----------------------- + // RemoveCodeBuildPrivilegedMode + // + { + const remediationName = 'RemoveCodeBuildPrivilegedMode'; + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); + + const remediationPolicy = new PolicyStatement(); + remediationPolicy.addActions('codebuild:BatchGetProjects', 'codebuild:UpdateProject'); + remediationPolicy.effect = Effect.ALLOW; + remediationPolicy.addResources('*'); + inlinePolicy.addStatements(remediationPolicy); + + new SsmRole(props.roleStack, 'RemediationRole ' + remediationName, { + solutionId: props.solutionId, + ssmDocName: remediationName, + remediationPolicy: inlinePolicy, + remediationRoleName: `${remediationRoleNameBase}${remediationName}`, + }); + + RunbookFactory.createRemediationRunbook(this, 'ASR ' + remediationName, { + ssmDocName: remediationName, + ssmDocPath: ssmdocs, + ssmDocFileName: `${remediationName}.yaml`, + scriptPath: `${ssmdocs}/scripts`, + solutionVersion: props.solutionVersion, + solutionDistBucket: props.solutionDistBucket, + solutionId: props.solutionId, + }); + const childToMod = inlinePolicy.node.findChild('Resource') as CfnPolicy; + childToMod.cfnOptions.metadata = { + cfn_nag: { + rules_to_suppress: [ + { + id: 'W12', + reason: 'Resource * is required for to allow remediation for any resource.', + }, + ], + }, + }; + } + //----------------------- + // EnableCloudFrontDefaultRootObject + // + { + const remediationName = 'EnableCloudFrontDefaultRootObject'; + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); + + const remediationPolicy = new PolicyStatement(); + 
remediationPolicy.addActions('cloudfront:GetDistributionConfig', 'cloudfront:UpdateDistribution'); + remediationPolicy.effect = Effect.ALLOW; + remediationPolicy.addResources('*'); + inlinePolicy.addStatements(remediationPolicy); + + new SsmRole(props.roleStack, 'RemediationRole ' + remediationName, { + solutionId: props.solutionId, + ssmDocName: remediationName, + remediationPolicy: inlinePolicy, + remediationRoleName: `${remediationRoleNameBase}${remediationName}`, + }); + + RunbookFactory.createRemediationRunbook(this, 'ASR ' + remediationName, { + ssmDocName: remediationName, + ssmDocPath: ssmdocs, + ssmDocFileName: `${remediationName}.yaml`, + scriptPath: `${ssmdocs}/scripts`, + solutionVersion: props.solutionVersion, + solutionDistBucket: props.solutionDistBucket, + solutionId: props.solutionId, + }); + const childToMod = inlinePolicy.node.findChild('Resource') as CfnPolicy; + childToMod.cfnOptions.metadata = { + cfn_nag: { + rules_to_suppress: [ + { + id: 'W12', + reason: 'Resource * is required for to allow remediation for any resource.', + }, + ], + }, + }; + } + + //----------------------- + // BlockSSMDocumentPublicAccess + // + { + const remediationName = 'BlockSSMDocumentPublicAccess'; + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); + + const remediationPolicy = new PolicyStatement(); + remediationPolicy.addActions('ssm:DescribeDocumentPermission', 'ssm:ModifyDocumentPermission'); + remediationPolicy.effect = Effect.ALLOW; + remediationPolicy.addResources('*'); + inlinePolicy.addStatements(remediationPolicy); + + new SsmRole(props.roleStack, 'RemediationRole ' + remediationName, { + solutionId: props.solutionId, + ssmDocName: remediationName, + remediationPolicy: inlinePolicy, + remediationRoleName: `${remediationRoleNameBase}${remediationName}`, + }); + + RunbookFactory.createRemediationRunbook(this, 'ASR ' + remediationName, { + ssmDocName: remediationName, + ssmDocPath: ssmdocs, + ssmDocFileName: 
`${remediationName}.yaml`, + scriptPath: `${ssmdocs}/scripts`, + solutionVersion: props.solutionVersion, + solutionDistBucket: props.solutionDistBucket, + solutionId: props.solutionId, + }); + const childToMod = inlinePolicy.node.findChild('Resource') as CfnPolicy; + childToMod.cfnOptions.metadata = { + cfn_nag: { + rules_to_suppress: [ + { + id: 'W12', + reason: 'Resource * is required for to allow remediation for any resource.', + }, + ], + }, + }; + } + //----------------------- + // EnableBucketEventNotifications + // + { + const remediationName = 'EnableBucketEventNotifications'; + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); + + const remediationPolicy = new PolicyStatement(); + remediationPolicy.addActions( + 's3:GetBucketNotification', + 's3:PutBucketNotification', + 'sns:CreateTopic', + 'sns:GetTopicAttributes', + 'sns:SetTopicAttributes', + ); + remediationPolicy.effect = Effect.ALLOW; + remediationPolicy.addResources('*'); + inlinePolicy.addStatements(remediationPolicy); + + new SsmRole(props.roleStack, 'RemediationRole ' + remediationName, { + solutionId: props.solutionId, + ssmDocName: remediationName, + remediationPolicy: inlinePolicy, + remediationRoleName: `${remediationRoleNameBase}${remediationName}`, + }); + + RunbookFactory.createRemediationRunbook(this, 'ASR ' + remediationName, { + ssmDocName: remediationName, + ssmDocPath: ssmdocs, + ssmDocFileName: `${remediationName}.yaml`, + scriptPath: `${ssmdocs}/scripts`, + solutionVersion: props.solutionVersion, + solutionDistBucket: props.solutionDistBucket, + solutionId: props.solutionId, + }); + const childToMod = inlinePolicy.node.findChild('Resource') as CfnPolicy; + childToMod.cfnOptions.metadata = { + cfn_nag: { + rules_to_suppress: [ + { + id: 'W12', + reason: 'Resource * is required for to allow remediation for any resource.', + }, + ], + }, + }; + } + //----------------------- + // AWSConfigRemediation-SetCloudFrontOriginDomain + // + { + 
const remediationName = 'SetCloudFrontOriginDomain'; + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); + const remediationPolicy = new PolicyStatement(); + remediationPolicy.addActions('cloudfront:UpdateDistribution', 'cloudfront:GetDistributionConfig'); + remediationPolicy.effect = Effect.ALLOW; + remediationPolicy.addResources('arn:' + this.partition + ':cloudfront::' + this.account + ':distribution/*'); + inlinePolicy.addStatements(remediationPolicy); + + new SsmRole(props.roleStack, 'RemediationRole ' + remediationName, { + solutionId: props.solutionId, + ssmDocName: remediationName, + remediationPolicy: inlinePolicy, + remediationRoleName: `${remediationRoleNameBase}${remediationName}`, + }); + + RunbookFactory.createRemediationRunbook(this, 'ASR ' + remediationName, { + ssmDocName: remediationName, + ssmDocPath: ssmdocs, + ssmDocFileName: `${remediationName}.yaml`, + scriptPath: `${ssmdocs}/scripts`, + solutionVersion: props.solutionVersion, + solutionDistBucket: props.solutionDistBucket, + solutionId: props.solutionId, + }); + const childToMod = inlinePolicy.node.findChild('Resource') as CfnPolicy; + childToMod.cfnOptions.metadata = { + cfn_nag: { + rules_to_suppress: [ + { + id: 'W12', + reason: 'Resource * is required for to allow remediation for any resource.', + }, + ], + }, + }; + } + //----------------------- + // DisableUnrestrictedAccessToHighRiskPorts + // + { + const remediationName = 'DisableUnrestrictedAccessToHighRiskPorts'; + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); + + const remediationPolicy = new PolicyStatement(); + remediationPolicy.addActions('ec2:DescribeSecurityGroupRules', 'ec2:RevokeSecurityGroupIngress'); + remediationPolicy.effect = Effect.ALLOW; + remediationPolicy.addResources('*'); + inlinePolicy.addStatements(remediationPolicy); + + new SsmRole(props.roleStack, 'RemediationRole ' + remediationName, { + solutionId: 
props.solutionId, + ssmDocName: remediationName, + remediationPolicy: inlinePolicy, + remediationRoleName: `${remediationRoleNameBase}${remediationName}`, + }); + + RunbookFactory.createRemediationRunbook(this, 'ASR ' + remediationName, { + ssmDocName: remediationName, + ssmDocPath: ssmdocs, + ssmDocFileName: `${remediationName}.yaml`, + scriptPath: `${ssmdocs}/scripts`, + solutionVersion: props.solutionVersion, + solutionDistBucket: props.solutionDistBucket, + solutionId: props.solutionId, + }); + const childToMod = inlinePolicy.node.findChild('Resource') as CfnPolicy; + childToMod.cfnOptions.metadata = { + cfn_nag: { + rules_to_suppress: [ + { + id: 'W12', + reason: 'Resource * is required for to allow remediation for any resource.', + }, + ], + }, + }; + } + //----------------------- + // EnablePrivateRepositoryScanning + // + { + const remediationName = 'EnablePrivateRepositoryScanning'; + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); + + const remediationPolicy = new PolicyStatement(); + remediationPolicy.addActions('ecr:PutImageScanningConfiguration'); + remediationPolicy.effect = Effect.ALLOW; + remediationPolicy.addResources('*'); + inlinePolicy.addStatements(remediationPolicy); + + new SsmRole(props.roleStack, 'RemediationRole ' + remediationName, { + solutionId: props.solutionId, + ssmDocName: remediationName, + remediationPolicy: inlinePolicy, + remediationRoleName: `${remediationRoleNameBase}${remediationName}`, + }); + + RunbookFactory.createRemediationRunbook(this, 'ASR ' + remediationName, { + ssmDocName: remediationName, + ssmDocPath: ssmdocs, + ssmDocFileName: `${remediationName}.yaml`, + scriptPath: `${ssmdocs}/scripts`, + solutionVersion: props.solutionVersion, + solutionDistBucket: props.solutionDistBucket, + solutionId: props.solutionId, + }); + const childToMod = inlinePolicy.node.findChild('Resource') as CfnPolicy; + childToMod.cfnOptions.metadata = { + cfn_nag: { + rules_to_suppress: [ + { + 
id: 'W12', + reason: 'Resource * is required for to allow remediation for any resource.', + }, + ], + }, + }; + } + //----------------------- + // SetS3LifecyclePolicy + // + { + const remediationName = 'SetS3LifecyclePolicy'; + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); + + const remediationPolicy = new PolicyStatement(); + remediationPolicy.addActions('s3:PutLifecycleConfiguration', 's3:GetLifecycleConfiguration'); + remediationPolicy.effect = Effect.ALLOW; + remediationPolicy.addResources('*'); + inlinePolicy.addStatements(remediationPolicy); + + new SsmRole(props.roleStack, 'RemediationRole ' + remediationName, { + solutionId: props.solutionId, + ssmDocName: remediationName, + remediationPolicy: inlinePolicy, + remediationRoleName: `${remediationRoleNameBase}${remediationName}`, + }); + + RunbookFactory.createRemediationRunbook(this, 'ASR ' + remediationName, { + ssmDocName: remediationName, + ssmDocPath: ssmdocs, + ssmDocFileName: `${remediationName}.yaml`, + scriptPath: `${ssmdocs}/scripts`, + solutionVersion: props.solutionVersion, + solutionDistBucket: props.solutionDistBucket, + solutionId: props.solutionId, + }); + const childToMod = inlinePolicy.node.findChild('Resource') as CfnPolicy; + childToMod.cfnOptions.metadata = { + cfn_nag: { + rules_to_suppress: [ + { + id: 'W12', + reason: 'Resource * is required for to allow remediation for any resource.', + }, + ], + }, + }; + } + //----------------------- + // UpdateSecretRotationPeriod + // + { + const remediationName = 'UpdateSecretRotationPeriod'; + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); + + const remediationPolicy = new PolicyStatement(); + remediationPolicy.addActions('secretsmanager:RotateSecret', 'secretsmanager:DescribeSecret'); + remediationPolicy.effect = Effect.ALLOW; + remediationPolicy.addResources('*'); + inlinePolicy.addStatements(remediationPolicy); + + new SsmRole(props.roleStack, 
'RemediationRole ' + remediationName, { + solutionId: props.solutionId, + ssmDocName: remediationName, + remediationPolicy: inlinePolicy, + remediationRoleName: `${remediationRoleNameBase}${remediationName}`, + }); + + RunbookFactory.createRemediationRunbook(this, 'ASR ' + remediationName, { + ssmDocName: remediationName, + ssmDocPath: ssmdocs, + ssmDocFileName: `${remediationName}.yaml`, + scriptPath: `${ssmdocs}/scripts`, + solutionVersion: props.solutionVersion, + solutionDistBucket: props.solutionDistBucket, + solutionId: props.solutionId, + }); + const childToMod = inlinePolicy.node.findChild('Resource') as CfnPolicy; + childToMod.cfnOptions.metadata = { + cfn_nag: { + rules_to_suppress: [ + { + id: 'W12', + reason: 'Resource * is required for to allow remediation for any resource.', + }, + ], + }, + }; + } + //----------------------- + // DisableTGWAutoAcceptSharedAttachments + // + { + const remediationName = 'DisableTGWAutoAcceptSharedAttachments'; + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); + + const remediationPolicy = new PolicyStatement(); + remediationPolicy.addActions('ec2:ModifyTransitGateway', 'ec2:DescribeTransitGateways'); + remediationPolicy.effect = Effect.ALLOW; + remediationPolicy.addResources('*'); + inlinePolicy.addStatements(remediationPolicy); + + new SsmRole(props.roleStack, 'RemediationRole ' + remediationName, { + solutionId: props.solutionId, + ssmDocName: remediationName, + remediationPolicy: inlinePolicy, + remediationRoleName: `${remediationRoleNameBase}${remediationName}`, + }); + + RunbookFactory.createRemediationRunbook(this, 'ASR ' + remediationName, { + ssmDocName: remediationName, + ssmDocPath: ssmdocs, + ssmDocFileName: `${remediationName}.yaml`, + scriptPath: `${ssmdocs}/scripts`, + solutionVersion: props.solutionVersion, + solutionDistBucket: props.solutionDistBucket, + solutionId: props.solutionId, + }); + const childToMod = inlinePolicy.node.findChild('Resource') as 
CfnPolicy; + childToMod.cfnOptions.metadata = { + cfn_nag: { + rules_to_suppress: [ + { + id: 'W12', + reason: 'Resource * is required for to allow remediation for any resource.', + }, + ], + }, + }; + } + //----------------------- + // EnableGuardDuty + // + { + const remediationName = 'EnableGuardDuty'; + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); + + const remediationPolicy = new PolicyStatement(); + remediationPolicy.addActions( + 'guardduty:ListDetectors', + 'guardduty:CreateDetector', + 'guardduty:GetDetector', + 'guardduty:UpdateDetector', + ); + remediationPolicy.effect = Effect.ALLOW; + remediationPolicy.addResources('*'); + inlinePolicy.addStatements(remediationPolicy); + + new SsmRole(props.roleStack, 'RemediationRole ' + remediationName, { + solutionId: props.solutionId, + ssmDocName: remediationName, + remediationPolicy: inlinePolicy, + remediationRoleName: `${remediationRoleNameBase}${remediationName}`, + }); + + RunbookFactory.createRemediationRunbook(this, 'ASR ' + remediationName, { + ssmDocName: remediationName, + ssmDocPath: ssmdocs, + ssmDocFileName: `${remediationName}.yaml`, + scriptPath: `${ssmdocs}/scripts`, + solutionVersion: props.solutionVersion, + solutionDistBucket: props.solutionDistBucket, + solutionId: props.solutionId, + }); + const childToMod = inlinePolicy.node.findChild('Resource') as CfnPolicy; + childToMod.cfnOptions.metadata = { + cfn_nag: { + rules_to_suppress: [ + { + id: 'W12', + reason: 'Resource * is required for to allow remediation for any resource.', + }, + ], + }, + }; + } + //----------------------- + // EnableAutoSecretRotation + // + { + const remediationName = 'EnableAutoSecretRotation'; + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); + + const remediationPolicy = new PolicyStatement(); + remediationPolicy.addActions('secretsmanager:RotateSecret', 'secretsmanager:DescribeSecret'); + remediationPolicy.effect = 
Effect.ALLOW; + remediationPolicy.addResources('*'); + inlinePolicy.addStatements(remediationPolicy); + + new SsmRole(props.roleStack, 'RemediationRole ' + remediationName, { + solutionId: props.solutionId, + ssmDocName: remediationName, + remediationPolicy: inlinePolicy, + remediationRoleName: `${remediationRoleNameBase}${remediationName}`, + }); + + RunbookFactory.createRemediationRunbook(this, 'ASR ' + remediationName, { + ssmDocName: remediationName, + ssmDocPath: ssmdocs, + ssmDocFileName: `${remediationName}.yaml`, + scriptPath: `${ssmdocs}/scripts`, + solutionVersion: props.solutionVersion, + solutionDistBucket: props.solutionDistBucket, + solutionId: props.solutionId, + }); + const childToMod = inlinePolicy.node.findChild('Resource') as CfnPolicy; + childToMod.cfnOptions.metadata = { + cfn_nag: { + rules_to_suppress: [ + { + id: 'W12', + reason: 'Resource * is required for to allow remediation for any resource.', + }, + ], + }, + }; + } + //----------------------- + // RevokeUnauthorizedInboundRules + // + { + const remediationName = 'RevokeUnauthorizedInboundRules'; + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); + + const remediationPolicy = new PolicyStatement(); + remediationPolicy.addActions('ec2:DescribeSecurityGroupRules', 'ec2:RevokeSecurityGroupIngress'); + remediationPolicy.effect = Effect.ALLOW; + remediationPolicy.addResources('*'); + inlinePolicy.addStatements(remediationPolicy); + + new SsmRole(props.roleStack, 'RemediationRole ' + remediationName, { + solutionId: props.solutionId, + ssmDocName: remediationName, + remediationPolicy: inlinePolicy, + remediationRoleName: `${remediationRoleNameBase}${remediationName}`, + }); + + RunbookFactory.createRemediationRunbook(this, 'ASR ' + remediationName, { + ssmDocName: remediationName, + ssmDocPath: ssmdocs, + ssmDocFileName: `${remediationName}.yaml`, + scriptPath: `${ssmdocs}/scripts`, + solutionVersion: props.solutionVersion, + solutionDistBucket: 
props.solutionDistBucket, + solutionId: props.solutionId, + }); + const childToMod = inlinePolicy.node.findChild('Resource') as CfnPolicy; + childToMod.cfnOptions.metadata = { + cfn_nag: { + rules_to_suppress: [ + { + id: 'W12', + reason: 'Resource * is required for to allow remediation for any resource.', + }, + ], + }, + }; + } + //----------------------- + // RemoveUnusedSecret + // + { + const remediationName = 'RemoveUnusedSecret'; + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); + + const remediationPolicy = new PolicyStatement(); + remediationPolicy.addActions('secretsmanager:DeleteSecret', 'secretsmanager:DescribeSecret'); + remediationPolicy.effect = Effect.ALLOW; + remediationPolicy.addResources('*'); + inlinePolicy.addStatements(remediationPolicy); + + new SsmRole(props.roleStack, 'RemediationRole ' + remediationName, { + solutionId: props.solutionId, + ssmDocName: remediationName, + remediationPolicy: inlinePolicy, + remediationRoleName: `${remediationRoleNameBase}${remediationName}`, + }); + + RunbookFactory.createRemediationRunbook(this, 'ASR ' + remediationName, { + ssmDocName: remediationName, + ssmDocPath: ssmdocs, + ssmDocFileName: `${remediationName}.yaml`, + scriptPath: `${ssmdocs}/scripts`, + solutionVersion: props.solutionVersion, + solutionDistBucket: props.solutionDistBucket, + solutionId: props.solutionId, + }); + const childToMod = inlinePolicy.node.findChild('Resource') as CfnPolicy; + childToMod.cfnOptions.metadata = { + cfn_nag: { + rules_to_suppress: [ + { + id: 'W12', + reason: 'Resource * is required for to allow remediation for any resource.', + }, + ], + }, + }; + } + //----------------------- + // AWS-TerminateEC2Instance + // + { + const remediationName = 'TerminateEC2Instance'; + const inlinePolicy = new Policy(props.roleStack, `ASR-Remediation-Policy-${remediationName}`); + + const remediationPolicy = new PolicyStatement(); + remediationPolicy.addActions('ec2:TerminateInstances', 
'ec2:DescribeInstanceStatus'); + remediationPolicy.effect = Effect.ALLOW; + remediationPolicy.addResources('*'); + inlinePolicy.addStatements(remediationPolicy); + + new SsmRole(props.roleStack, 'RemediationRole ' + remediationName, { + solutionId: props.solutionId, + ssmDocName: remediationName, + remediationPolicy: inlinePolicy, + remediationRoleName: `${remediationRoleNameBase}${remediationName}`, + }); + + const childToMod = inlinePolicy.node.findChild('Resource') as CfnPolicy; + childToMod.cfnOptions.metadata = { + cfn_nag: { + rules_to_suppress: [ + { + id: 'W12', + reason: 'Resource * is required for to allow remediation for any resource.', + }, + ], + }, + }; + } } } diff --git a/source/lib/sharrplaybook-construct.ts b/source/lib/sharrplaybook-construct.ts index 22fead6a..9703e4f9 100644 --- a/source/lib/sharrplaybook-construct.ts +++ b/source/lib/sharrplaybook-construct.ts @@ -39,7 +39,7 @@ export class PlaybookPrimaryStack extends cdk.Stack { const RESOURCE_PREFIX = props.solutionId.replace(/^DEV-/, ''); // prefix on every resource name const orchestratorArn = StringParameter.valueForStringParameter( this, - `/Solutions/${RESOURCE_PREFIX}/OrchestratorArn` + `/Solutions/${RESOURCE_PREFIX}/OrchestratorArn`, ); // Register the playbook. 
These parameters enable the step function to route matching events @@ -74,10 +74,7 @@ export class PlaybookPrimaryStack extends cdk.Stack { }); } let generatorId = ''; - if ( - props.securityStandard === 'CIS' && - (props.securityStandardVersion === '1.2.0' || props.securityStandardVersion === '1.4.0') - ) { + if (props.securityStandard === 'CIS' && props.securityStandardVersion === '1.2.0') { // CIS 1.2.0 uses an arn-like format: arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/1.3 generatorId = `arn:${stack.partition}:securityhub:::ruleset/${props.securityStandardLongName}/v/${props.securityStandardVersion}/rule/${controlSpec.control}`; } else { @@ -127,7 +124,7 @@ export class PlaybookMemberStack extends cdk.Stack { const waitProvider = WaitProvider.fromServiceToken( this, 'WaitProvider', - waitProviderServiceTokenParam.valueAsString + waitProviderServiceTokenParam.valueAsString, ); Aspects.of(this).add(new SsmDocRateLimit(waitProvider)); diff --git a/source/lib/sns2-remediation-resources.ts b/source/lib/sns2-remediation-resources.ts index a0db9ceb..9e9488a3 100644 --- a/source/lib/sns2-remediation-resources.ts +++ b/source/lib/sns2-remediation-resources.ts @@ -5,10 +5,13 @@ import * as cdk from 'aws-cdk-lib'; import { Effect, Policy, PolicyStatement, Role, ServicePrincipal, CfnRole } from 'aws-cdk-lib/aws-iam'; import { Construct } from 'constructs'; +export interface ISNS2DeliveryStatusLoggingRole { + roleName: string; +} + export class SNS2DeliveryStatusLoggingRole extends Construct { roleArn: string; - roleName: string; - constructor(scope: Construct, id: string) { + constructor(scope: Construct, id: string, props: ISNS2DeliveryStatusLoggingRole) { super(scope, id); const deliveryStatusLoggingPolicy = new Policy(this, 'Delivery-Status-Logging-Policy'); @@ -42,9 +45,10 @@ export class SNS2DeliveryStatusLoggingRole extends Construct { This role is retained after the solution is deleted to support continuing function of SNS delivery 
status logging enabled by this remediation. Before removing this role, use IAM access analyzer for confirming it's safe`, + roleName: props.roleName, }); + this.roleArn = deliveryStatusLoggingRole.roleArn; - this.roleName = deliveryStatusLoggingRole.roleName; deliveryStatusLoggingRole.attachInlinePolicy(deliveryStatusLoggingPolicy); deliveryStatusLoggingRole.applyRemovalPolicy(cdk.RemovalPolicy.RETAIN); diff --git a/source/lib/solution_deploy-stack.ts b/source/lib/solution_deploy-stack.ts index 6ce99acd..4d2b49c9 100644 --- a/source/lib/solution_deploy-stack.ts +++ b/source/lib/solution_deploy-stack.ts @@ -5,8 +5,11 @@ import * as cdk from 'aws-cdk-lib'; import * as s3 from 'aws-cdk-lib/aws-s3'; import * as sns from 'aws-cdk-lib/aws-sns'; import * as lambda from 'aws-cdk-lib/aws-lambda'; +import * as dynamodb from 'aws-cdk-lib/aws-dynamodb'; +import * as sqs from 'aws-cdk-lib/aws-sqs'; import { StringParameter, CfnParameter } from 'aws-cdk-lib/aws-ssm'; import * as kms from 'aws-cdk-lib/aws-kms'; +import * as lambdaEventSources from 'aws-cdk-lib/aws-lambda-event-sources'; import * as fs from 'fs'; import { Role, @@ -21,6 +24,8 @@ import { import { OrchestratorConstruct } from './common-orchestrator-construct'; import { CfnStateMachine, StateMachine } from 'aws-cdk-lib/aws-stepfunctions'; import { OneTrigger } from './ssmplaybook'; +import { CloudWatchMetrics } from './cloudwatch_metrics'; + export interface SHARRStackProps extends cdk.StackProps { solutionId: string; solutionVersion: string; @@ -32,7 +37,7 @@ export interface SHARRStackProps extends cdk.StackProps { } export class SolutionDeployStack extends cdk.Stack { - SEND_ANONYMOUS_DATA = 'Yes'; + SEND_ANONYMIZED_DATA = 'Yes'; nestedStacks: cdk.Stack[]; constructor(scope: cdk.App, id: string, props: SHARRStackProps) { @@ -112,20 +117,18 @@ export class SolutionDeployStack extends cdk.Stack { new StringParameter(this, 'SHARR_SNS_Topic', { description: - 'SNS Topic ARN where SHARR will send status messages. 
This\ - topic can be useful for driving additional actions, such as email notifications,\ - trouble ticket updates.', + 'SNS Topic ARN where SHARR will send status messages. This topic can be useful for driving additional actions, such as email notifications, trouble ticket updates.', parameterName: '/Solutions/' + RESOURCE_PREFIX + '/SNS_Topic_ARN', stringValue: snsTopic.topicArn, }); const mapping = new cdk.CfnMapping(this, 'mappings'); - mapping.setValue('sendAnonymousMetrics', 'data', this.SEND_ANONYMOUS_DATA); + mapping.setValue('sendAnonymizedMetrics', 'data', this.SEND_ANONYMIZED_DATA); new StringParameter(this, 'SHARR_SendAnonymousMetrics', { description: 'Flag to enable or disable sending anonymous metrics.', - parameterName: '/Solutions/' + RESOURCE_PREFIX + '/sendAnonymousMetrics', - stringValue: mapping.findInMap('sendAnonymousMetrics', 'data'), + parameterName: '/Solutions/' + RESOURCE_PREFIX + '/sendAnonymizedMetrics', + stringValue: mapping.findInMap('sendAnonymizedMetrics', 'data'), }); new StringParameter(this, 'SHARR_version', { @@ -144,7 +147,7 @@ export class SolutionDeployStack extends cdk.Stack { license: 'https://www.apache.org/licenses/LICENSE-2.0', code: lambda.Code.fromBucket( SolutionsBucket, - props.solutionTMN + '/' + props.solutionVersion + '/lambda/layer.zip' + props.solutionTMN + '/' + props.solutionVersion + '/lambda/layer.zip', ), }); @@ -239,7 +242,7 @@ export class SolutionDeployStack extends cdk.Stack { description: 'Checks the status of an SSM Automation Document in the target account', code: lambda.Code.fromBucket( SolutionsBucket, - props.solutionTMN + '/' + props.solutionVersion + '/lambda/check_ssm_doc_state.py.zip' + props.solutionTMN + '/' + props.solutionVersion + '/lambda/check_ssm_doc_state.py.zip', ), environment: { log_level: 'info', @@ -276,13 +279,6 @@ export class SolutionDeployStack extends cdk.Stack { }; } - cdk_nag.NagSuppressions.addResourceSuppressions(checkSSMDocState, [ - { - id: "AwsSolutions-L1", - 
reason: "Will upgrade in next release to prioritize patch", - }, - ]); - /** * @description getApprovalRequirement - determine whether manual approval is required * @type {lambda.Function} @@ -294,7 +290,7 @@ export class SolutionDeployStack extends cdk.Stack { description: 'Determines if a manual approval is required for remediation', code: lambda.Code.fromBucket( SolutionsBucket, - props.solutionTMN + '/' + props.solutionVersion + '/lambda/get_approval_requirement.py.zip' + props.solutionTMN + '/' + props.solutionVersion + '/lambda/get_approval_requirement.py.zip', ), environment: { log_level: 'info', @@ -332,13 +328,6 @@ export class SolutionDeployStack extends cdk.Stack { }; } - cdk_nag.NagSuppressions.addResourceSuppressions(getApprovalRequirement, [ - { - id: "AwsSolutions-L1", - reason: "Will upgrade in next release to prioritize patch", - }, - ]); - /** * @description execAutomation - initiate an SSM automation document in a target account * @type {lambda.Function} @@ -350,7 +339,7 @@ export class SolutionDeployStack extends cdk.Stack { description: 'Executes an SSM Automation Document in a target account', code: lambda.Code.fromBucket( SolutionsBucket, - props.solutionTMN + '/' + props.solutionVersion + '/lambda/exec_ssm_doc.py.zip' + props.solutionTMN + '/' + props.solutionVersion + '/lambda/exec_ssm_doc.py.zip', ), environment: { log_level: 'info', @@ -387,13 +376,6 @@ export class SolutionDeployStack extends cdk.Stack { }; } - cdk_nag.NagSuppressions.addResourceSuppressions(execAutomation, [ - { - id: "AwsSolutions-L1", - reason: "Will upgrade in next release to prioritize patch", - }, - ]); - /** * @description monitorSSMExecState - get the status of an ssm execution * @type {lambda.Function} @@ -405,7 +387,7 @@ export class SolutionDeployStack extends cdk.Stack { description: 'Checks the status of an SSM automation document execution', code: lambda.Code.fromBucket( SolutionsBucket, - props.solutionTMN + '/' + props.solutionVersion + 
'/lambda/check_ssm_execution.py.zip' + props.solutionTMN + '/' + props.solutionVersion + '/lambda/check_ssm_execution.py.zip', ), environment: { log_level: 'info', @@ -442,13 +424,6 @@ export class SolutionDeployStack extends cdk.Stack { }; } - cdk_nag.NagSuppressions.addResourceSuppressions(monitorSSMExecState, [ - { - id: "AwsSolutions-L1", - reason: "Will upgrade in next release to prioritize patch", - }, - ]); - /** * @description Policy for role used by common Orchestrator notification lambda * @type {Policy} @@ -476,6 +451,10 @@ export class SolutionDeployStack extends cdk.Stack { actions: ['sns:Publish'], resources: [`arn:${this.partition}:sns:${this.region}:${this.account}:${RESOURCE_PREFIX}-SHARR_Topic`], }), + new PolicyStatement({ + actions: ['cloudwatch:PutMetricData'], + resources: ['*'], + }), ], }); @@ -546,7 +525,7 @@ export class SolutionDeployStack extends cdk.Stack { description: 'Sends notifications and log messages', code: lambda.Code.fromBucket( SolutionsBucket, - props.solutionTMN + '/' + props.solutionVersion + '/lambda/send_notifications.py.zip' + props.solutionTMN + '/' + props.solutionVersion + '/lambda/send_notifications.py.zip', ), environment: { log_level: 'info', @@ -583,13 +562,6 @@ export class SolutionDeployStack extends cdk.Stack { }; } - cdk_nag.NagSuppressions.addResourceSuppressions(sendNotifications, [ - { - id: "AwsSolutions-L1", - reason: "Will upgrade in next release to prioritize patch", - }, - ]); - //------------------------------------------------------------------------- // Custom Lambda Policy // @@ -668,12 +640,12 @@ export class SolutionDeployStack extends cdk.Stack { description: 'Custom resource to create an action target in Security Hub', code: lambda.Code.fromBucket( SolutionsBucket, - props.solutionTMN + '/' + props.solutionVersion + '/lambda/action_target_provider.zip' + props.solutionTMN + '/' + props.solutionVersion + '/lambda/action_target_provider.zip', ), environment: { log_level: 'info', AWS_PARTITION: 
this.partition, - sendAnonymousMetrics: mapping.findInMap('sendAnonymousMetrics', 'data'), + sendAnonymizedMetrics: mapping.findInMap('sendAnonymizedMetrics', 'data'), SOLUTION_ID: props.solutionId, SOLUTION_VERSION: props.solutionVersion, }, @@ -704,12 +676,30 @@ export class SolutionDeployStack extends cdk.Stack { }, }; - cdk_nag.NagSuppressions.addResourceSuppressions(createCustomAction, [ - { - id: "AwsSolutions-L1", - reason: "Will upgrade in next release to prioritize patch", - }, - ]); + //--------------------------------------------------------------------- + // Scheduling Queue for SQS Remediation Throttling + // + const deadLetterQueue = new sqs.Queue(this, 'deadLetterSchedulingQueue', { + encryption: sqs.QueueEncryption.KMS, + enforceSSL: true, + encryptionMasterKey: kmsKey, + }); + + const deadLetterQueueDeclaration: sqs.DeadLetterQueue = { + maxReceiveCount: 10, + queue: deadLetterQueue, + }; + + const schedulingQueue = new sqs.Queue(this, 'SchedulingQueue', { + encryption: sqs.QueueEncryption.KMS, + enforceSSL: true, + deadLetterQueue: deadLetterQueueDeclaration, + encryptionMasterKey: kmsKey, + }); + + const eventSource = new lambdaEventSources.SqsEventSource(schedulingQueue, { + batchSize: 1, + }); const orchestrator = new OrchestratorConstruct(this, 'orchestrator', { roleArn: orchestratorRole.roleArn, @@ -723,6 +713,7 @@ export class SolutionDeployStack extends cdk.Stack { solutionVersion: props.solutionVersion, orchLogGroup: props.orchLogGroup, kmsKeyParm: kmsKeyParm, + sqsQueue: schedulingQueue, }); this.nestedStacks.push(orchestrator.nestedStack as cdk.Stack); @@ -776,7 +767,7 @@ export class SolutionDeployStack extends cdk.Stack { '-reference.s3.amazonaws.com/' + cdk.Fn.findInMap('SourceCode', 'General', 'KeyPrefix') + '/playbooks/' + - template_file + template_file, ); cfnStack.cfnOptions.condition = new cdk.CfnCondition(this, `load${file}Cond`, { expression: cdk.Fn.conditionEquals(adminStackOption, 'yes'), @@ -787,6 +778,141 @@ export class 
SolutionDeployStack extends cdk.Stack { this.nestedStacks.push(adminStack as cdk.Stack); } }); + + //--------------------------------------------------------------------- + // Scheduling Table for SQS Remediation Throttling + // + const schedulingTable = new dynamodb.Table(this, 'SchedulingTable', { + partitionKey: { name: 'AccountID-Region', type: dynamodb.AttributeType.STRING }, + encryption: dynamodb.TableEncryption.AWS_MANAGED, + pointInTimeRecovery: true, + timeToLiveAttribute: 'TTL', + }); + + const schedulingLamdbdaPolicy = new Policy(this, 'SchedulingLambdaPolicy', { + policyName: RESOURCE_PREFIX + '-SHARR_Scheduling_Lambda', + statements: [ + new PolicyStatement({ + actions: ['logs:CreateLogGroup', 'logs:CreateLogStream', 'logs:PutLogEvents'], + resources: ['*'], + }), + new PolicyStatement({ + actions: ['ssm:GetParameter', 'ssm:PutParameter'], + resources: [`arn:${this.partition}:ssm:${this.region}:${this.account}:parameter/Solutions/SO0111/*`], + }), + new PolicyStatement({ + actions: ['cloudwatch:PutMetricData'], + resources: ['*'], + }), + ], + }); + + cdk_nag.NagSuppressions.addResourceSuppressions(schedulingLamdbdaPolicy, [ + { + id: 'AwsSolutions-IAM5', + reason: 'Resource * is required for CloudWatch Logs used by the Scheduling Lambda function.', + }, + ]); + + const schedulingLambdaRole = new Role(this, 'SchedulingLambdaRole', { + assumedBy: new ServicePrincipal('lambda.amazonaws.com'), + description: 'Lambda role to schedule remediations that are sent to SQS through the orchestrator', + }); + + schedulingLambdaRole.attachInlinePolicy(schedulingLamdbdaPolicy); + /** + * @description schedulingLambdaTrigger - Lambda trigger for SQS Queue + * @type {lambda.Function} + */ + const schedulingLambdaTrigger = new lambda.Function(this, 'schedulingLambdaTrigger', { + functionName: RESOURCE_PREFIX + '-SHARR-schedulingLambdaTrigger', + handler: 'schedule_remediation.lambda_handler', + runtime: props.runtimePython, + description: 'SO0111 ASR function that 
schedules remediations in member accounts', + code: lambda.Code.fromBucket( + SolutionsBucket, + props.solutionTMN + '/' + props.solutionVersion + '/lambda/schedule_remediation.py.zip', + ), + environment: { + SchedulingTableName: schedulingTable.tableName, + RemediationWaitTime: '3', + }, + memorySize: 128, + timeout: cdk.Duration.seconds(10), + role: schedulingLambdaRole, + reservedConcurrentExecutions: 1, + layers: [sharrLambdaLayer], + }); + orchStateMachine.grantTaskResponse(schedulingLambdaTrigger); + schedulingTable.grantReadWriteData(schedulingLambdaTrigger); + + schedulingLambdaTrigger.addEventSource(eventSource); + + const cloudWatchMetrics = new CloudWatchMetrics(this, { + solutionId: props.solutionId, + schedulingQueueName: schedulingQueue.queueName, + orchStateMachineArn: orchStateMachine.stateMachineArn, + kmsKey: kmsKey, + }); + + const customResourceLambdaPolicyDocument = new PolicyDocument({ + statements: [ + new PolicyStatement({ actions: ['cloudwatch:PutMetricData'], resources: ['*'] }), + new PolicyStatement({ + actions: ['logs:CreateLogGroup', 'logs:CreateLogStream', 'logs:PutLogEvents'], + resources: ['*'], + }), + new PolicyStatement({ + actions: ['ssm:GetParameter', 'ssm:GetParameters', 'ssm:PutParameter'], + resources: [`arn:${this.partition}:ssm:*:${this.account}:parameter/Solutions/SO0111/*`], + }), + ], + }); + + const customResourceLambdaRole = new Role(this, `${id}Role`, { + assumedBy: new ServicePrincipal('lambda.amazonaws.com'), + inlinePolicies: { LambdaPolicy: customResourceLambdaPolicyDocument }, + }); + + const customResourceFunction = new lambda.Function(this, 'ASR-DeploymentCustomResource-Lambda', { + code: lambda.Code.fromBucket( + SolutionsBucket, + props.solutionTMN + '/' + props.solutionVersion + '/lambda/deployment_metrics_custom_resource.zip', + ), + handler: 'deployment_metrics_custom_resource.lambda_handler', + runtime: props.runtimePython, + description: 'ASR - Handles deployment related custom actions', + environment: 
{ + LOG_LEVEL: 'INFO', + AWS_PARTITION: this.partition, + SOLUTION_ID: props.solutionId, + SOLUTION_VERSION: props.solutionVersion, + }, + memorySize: 256, + timeout: cdk.Duration.seconds(5), + role: customResourceLambdaRole, + layers: [sharrLambdaLayer], + }); + + new cdk.CustomResource(this, `ASR-DeploymentMetricsCustomResource`, { + resourceType: 'Custom::DeploymentMetrics', + serviceToken: customResourceFunction.functionArn, + properties: { + CloudWatchMetricsDashboardEnabled: cloudWatchMetrics.getCloudWatchMetricsParameterValue(), + }, + }); + + cdk_nag.NagSuppressions.addResourceSuppressions( + customResourceLambdaRole, + [ + { + id: 'AwsSolutions-IAM5', + reason: 'Resource * is needed for CloudWatch Logs policies used on Lambda functions.', + }, + ], + true, + ); + stack.templateOptions.metadata = { 'AWS::CloudFormation::Interface': { ParameterGroups: [ @@ -794,7 +920,18 @@ export class SolutionDeployStack extends cdk.Stack { Label: { default: 'Security Standard Playbooks' }, Parameters: standardLogicalNames, }, + { + Label: { default: 'Orchestrator Configuration' }, + Parameters: ['ReuseOrchestratorLogGroup'], + }, + { + Label: { default: 'CloudWatch Metrics' }, + Parameters: cloudWatchMetrics.getParameterIds(), + }, ], + ParameterLabels: { + ...cloudWatchMetrics.getParameterIdsAndLabels(), + }, }, }; } diff --git a/source/lib/ssm-doc-rate-limit.test.ts b/source/lib/ssm-doc-rate-limit.test.ts index 45e6f3bd..f75bedc5 100644 --- a/source/lib/ssm-doc-rate-limit.test.ts +++ b/source/lib/ssm-doc-rate-limit.test.ts @@ -69,7 +69,7 @@ describe('SSM doc rate limit aspect', function () { documentSets.push( (deleteWaits[logicalId].DependsOn as Array).filter(function (value: string) { return documentLogicalIds.includes(value); - }) + }), ); }); const remainingDocuments = { ...documents }; @@ -167,7 +167,7 @@ describe('SSM doc rate limit aspect with conditional documents', function () { documentSet.push( ...dependencies.filter(function (value: string) { return 
documentLogicalIds.includes(value); - }) + }), ); documentSets.push(documentSet); }); diff --git a/source/lib/ssm-doc-rate-limit.ts b/source/lib/ssm-doc-rate-limit.ts index 94991d9a..c9460452 100644 --- a/source/lib/ssm-doc-rate-limit.ts +++ b/source/lib/ssm-doc-rate-limit.ts @@ -82,7 +82,7 @@ export default class SsmDocRateLimit implements IAspect { } this.currentDummyResource.addMetadata( `${node.logicalId}Ready`, - Fn.conditionIf(node.cfnOptions.condition.logicalId, Fn.ref(node.logicalId), '') + Fn.conditionIf(node.cfnOptions.condition.logicalId, Fn.ref(node.logicalId), ''), ); this.currentDeleteWaitResource.node.addDependency(this.currentDummyResource); } else { diff --git a/source/lib/ssmplaybook.ts b/source/lib/ssmplaybook.ts index b8a9fe38..8ef3e405 100644 --- a/source/lib/ssmplaybook.ts +++ b/source/lib/ssmplaybook.ts @@ -89,7 +89,10 @@ export class Trigger extends Construct { }); enable_auto_remediation_param.overrideLogicalId( - `${props.securityStandard}${props.securityStandardVersion}${props.controlId}AutoTrigger`.replace(illegalChars, '') + `${props.securityStandard}${props.securityStandardVersion}${props.controlId}AutoTrigger`.replace( + illegalChars, + '', + ), ); interface IPattern { @@ -249,7 +252,7 @@ export class SsmRole extends Construct { actions: ['sts:AssumeRole'], resources: [`arn:${stack.partition}:iam::${stack.account}:role/${props.remediationRoleName}`], effect: Effect.ALLOW, - }) + }), ); // AssumeRole Policy @@ -259,7 +262,7 @@ export class SsmRole extends Construct { const RESOURCE_PREFIX = props.solutionId.replace(/^DEV-/, ''); const roleprincipal = new ArnPrincipal( - `arn:${stack.partition}:iam::${stack.account}:role/${RESOURCE_PREFIX}-SHARR-Orchestrator-Member` + `arn:${stack.partition}:iam::${stack.account}:role/${RESOURCE_PREFIX}-SHARR-Orchestrator-Member`, ); const principals = new CompositePrincipal(roleprincipal); diff --git a/source/lib/wait-provider.ts b/source/lib/wait-provider.ts index d5ee2c2a..39fcb0d7 100644 --- 
a/source/lib/wait-provider.ts +++ b/source/lib/wait-provider.ts @@ -75,25 +75,19 @@ export class WaitProvider extends Construct { }, ]); - const lambdaFunction = new Function(scope, `${id}Function`, { //NOSONAR This is not unknown code. + const lambdaFunction = new Function(scope, `${id}Function`, { + //NOSONAR This is not unknown code. role, runtime: props.runtimePython, code: Code.fromBucket( Bucket.fromBucketName(scope, 'Bucket', `${props.solutionDistBucket}-${Stack.of(scope).region}`), - props.solutionTMN + '/' + props.solutionVersion + '/lambda/wait_provider.zip' + props.solutionTMN + '/' + props.solutionVersion + '/lambda/wait_provider.zip', ), handler: 'wait_provider.lambda_handler', environment: { LOG_LEVEL: 'INFO' }, timeout: Duration.minutes(15), }); - NagSuppressions.addResourceSuppressions(lambdaFunction, [ - { - id: "AwsSolutions-L1", - reason: "Will upgrade in next release to prioritize patch", - }, - ]); - return new WaitProvider(scope, id, { serviceToken: lambdaFunction.functionArn }); } } diff --git a/source/package-lock.json b/source/package-lock.json index 9ac49a28..ce54c4b6 100644 --- a/source/package-lock.json +++ b/source/package-lock.json @@ -1,12 +1,12 @@ { "name": "aws-security-hub-automated-response-and-remediation", - "version": "2.0.2", + "version": "2.1.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "aws-security-hub-automated-response-and-remediation", - "version": "2.0.2", + "version": "2.1.0", "license": "Apache-2.0", "bin": { "solution_deploy": "bin/solution_deploy.js" @@ -16,7 +16,7 @@ "@cdklabs/cdk-ssm-documents": "^0.0.40", "@types/jest": "^29.4.0", "@types/js-yaml": "^4.0.5", - "@types/node": "^18.11.18", + "@types/node": "^20.1.0", "@types/prettier": "^2.7.2", "@typescript-eslint/eslint-plugin": "^5.49.0", "aws-cdk": "^2.88.0", @@ -27,7 +27,7 @@ "eslint-config-prettier": "^8.6.0", "eslint-plugin-header": "^3.1.1", "eslint-plugin-import": "^2.27.5", - "eslint-plugin-prettier": "^4.2.1", + 
"eslint-plugin-prettier": "^5.0.0", "jest": "^29.4.0", "js-yaml": "^4.1.0", "source-map-support": "^0.5.21", @@ -1475,9 +1475,9 @@ } }, "node_modules/@eslint/js": { - "version": "8.52.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.52.0.tgz", - "integrity": "sha512-mjZVbpaeMZludF2fsWLD0Z9gCref1Tk4i9+wddjRvpUNqqcndPkBD09N/Mapey0b3jaXbLm2kICwFv2E64QinA==", + "version": "8.51.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.51.0.tgz", + "integrity": "sha512-HxjQ8Qn+4SI3/AFv6sOrDB+g6PpUTDwSJiQqOrnneEk8L71161srI9gjzzZvYVbzHiVg/BvcH95+cK/zfIt4pg==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -1985,6 +1985,32 @@ "node": ">= 8" } }, + "node_modules/@pkgr/utils": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/@pkgr/utils/-/utils-2.4.2.tgz", + "integrity": "sha512-POgTXhjrTfbTV63DiFXav4lBHiICLKKwDeaKn9Nphwj7WH6m0hMMCaJkMyRWjgtPFyRKRVoMXXjczsTQRDEhYw==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "fast-glob": "^3.3.0", + "is-glob": "^4.0.3", + "open": "^9.1.0", + "picocolors": "^1.0.0", + "tslib": "^2.6.0" + }, + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts" + } + }, + "node_modules/@pkgr/utils/node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==", + "dev": true + }, "node_modules/@sinclair/typebox": { "version": "0.27.8", "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", @@ -2136,10 +2162,13 @@ "dev": true }, "node_modules/@types/node": { - "version": "18.18.6", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.6.tgz", - "integrity": "sha512-wf3Vz+jCmOQ2HV1YUJuCWdL64adYxumkrxtc+H1VUQlnQI04+5HtH+qZCOE21lBE7gIrt+CwX2Wv8Acrw5Ak6w==", - "dev": true + "version": "20.8.8", + "resolved": 
"https://registry.npmjs.org/@types/node/-/node-20.8.8.tgz", + "integrity": "sha512-YRsdVxq6OaLfmR9Hy816IMp33xOBjfyOgUd77ehqg96CFywxAPbDbXvAsuN2KVg2HOT8Eh6uAfU+l4WffwPVrQ==", + "dev": true, + "dependencies": { + "undici-types": "~5.25.1" + } }, "node_modules/@types/prettier": { "version": "2.7.3", @@ -2363,12 +2392,6 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@ungap/structured-clone": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", - "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", - "dev": true - }, "node_modules/acorn": { "version": "8.10.0", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", @@ -3132,6 +3155,27 @@ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "dev": true }, + "node_modules/big-integer": { + "version": "1.6.51", + "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.51.tgz", + "integrity": "sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg==", + "dev": true, + "engines": { + "node": ">=0.6" + } + }, + "node_modules/bplist-parser": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/bplist-parser/-/bplist-parser-0.2.0.tgz", + "integrity": "sha512-z0M+byMThzQmD9NILRniCUXYsYpjwnlO8N5uCFaCqIOpqRsJCrQL9NK3JsD67CN5a08nF5oIL2bD6loTdHOuKw==", + "dev": true, + "dependencies": { + "big-integer": "^1.6.44" + }, + "engines": { + "node": ">= 5.10.0" + } + }, "node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -3213,6 +3257,21 @@ "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", "dev": true }, + "node_modules/bundle-name": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/bundle-name/-/bundle-name-3.0.0.tgz", + "integrity": "sha512-PKA4BeSvBpQKQ8iPOGCSiell+N8P+Tf1DlwqmYhpe2gAhKPHn8EYOxVT+ShuGmhg8lN8XiSlS80yiExKXrURlw==", + "dev": true, + "dependencies": { + "run-applescript": "^5.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/call-bind": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.5.tgz", @@ -3246,9 +3305,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001553", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001553.tgz", - "integrity": "sha512-N0ttd6TrFfuqKNi+pMgWJTb9qrdJu4JSpgPFLe/lrD19ugC6fZgF0pUewRowDwzdDnb9V41mFcdlYgl/PyKf4A==", + "version": "1.0.30001551", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001551.tgz", + "integrity": "sha512-vtBAez47BoGMMzlbYhfXrMV1kvRF2WP/lqiMuDu1Sb4EE4LKEgjopFDSRtZfdVnslNRpOqV/woE+Xgrwj6VQlg==", "dev": true, "funding": [ { @@ -3266,9 +3325,9 @@ ] }, "node_modules/cdk-nag": { - "version": "2.27.171", - "resolved": "https://registry.npmjs.org/cdk-nag/-/cdk-nag-2.27.171.tgz", - "integrity": "sha512-3h87and56VWB3ukOk9BgFVBt6+OgRK76UEHAjzQz8QZecsgf9qADA4Ec9lXrK1pZEyX80/4OEFo1HP8pbRBiKQ==", + "version": "2.27.167", + "resolved": "https://registry.npmjs.org/cdk-nag/-/cdk-nag-2.27.167.tgz", + "integrity": "sha512-NI7q1R2ekVCvkk9hB3bunNTUZ/Ko4s/jmanZHrNSbQ82X9y5SZ0G85n+phY716j07PUXevQ3r53cfhrioyUPaA==", "dev": true, "peerDependencies": { "aws-cdk-lib": "^2.78.0", @@ -3477,6 +3536,150 @@ "node": ">=0.10.0" } }, + "node_modules/default-browser": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/default-browser/-/default-browser-4.0.0.tgz", + "integrity": "sha512-wX5pXO1+BrhMkSbROFsyxUm0i/cJEScyNhA4PPxc41ICuv05ZZB/MX28s8aZx6xjmatvebIapF6hLEKEcpneUA==", + "dev": true, + "dependencies": { + "bundle-name": "^3.0.0", + "default-browser-id": "^3.0.0", + "execa": 
"^7.1.1", + "titleize": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/default-browser-id": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/default-browser-id/-/default-browser-id-3.0.0.tgz", + "integrity": "sha512-OZ1y3y0SqSICtE8DE4S8YOE9UZOJ8wO16fKWVP5J1Qz42kV9jcnMVFrEE/noXb/ss3Q4pZIH79kxofzyNNtUNA==", + "dev": true, + "dependencies": { + "bplist-parser": "^0.2.0", + "untildify": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/default-browser/node_modules/execa": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-7.2.0.tgz", + "integrity": "sha512-UduyVP7TLB5IcAQl+OzLyLcS/l32W/GLg+AhHJ+ow40FOk2U3SAllPwR44v4vmdFwIWqpdwxxpQbF1n5ta9seA==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.1", + "human-signals": "^4.3.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^3.0.7", + "strip-final-newline": "^3.0.0" + }, + "engines": { + "node": "^14.18.0 || ^16.14.0 || >=18.0.0" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/default-browser/node_modules/human-signals": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-4.3.1.tgz", + "integrity": "sha512-nZXjEF2nbo7lIw3mgYjItAfgQXog3OjJogSbKa2CQIIvSGWcKgeJnQlNXip6NglNzYH45nSRiEVimMvYL8DDqQ==", + "dev": true, + "engines": { + "node": ">=14.18.0" + } + }, + "node_modules/default-browser/node_modules/is-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 
|| >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/default-browser/node_modules/mimic-fn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", + "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/default-browser/node_modules/npm-run-path": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.1.0.tgz", + "integrity": "sha512-sJOdmRGrY2sjNTRMbSvluQqg+8X7ZK61yvzBEIDhz4f8z1TZFYABsqjjCBd/0PUNE9M6QDgHJXQkGUEm7Q+l9Q==", + "dev": true, + "dependencies": { + "path-key": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/default-browser/node_modules/onetime": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", + "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", + "dev": true, + "dependencies": { + "mimic-fn": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/default-browser/node_modules/path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/default-browser/node_modules/strip-final-newline": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", + 
"integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/define-data-property": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", @@ -3491,6 +3694,18 @@ "node": ">= 0.4" } }, + "node_modules/define-lazy-prop": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz", + "integrity": "sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/define-properties": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", @@ -3560,9 +3775,9 @@ } }, "node_modules/electron-to-chromium": { - "version": "1.4.565", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.565.tgz", - "integrity": "sha512-XbMoT6yIvg2xzcbs5hCADi0dXBh4//En3oFXmtPX+jiyyiCTiM9DGFT2SLottjpEs9Z8Mh8SqahbR96MaHfuSg==", + "version": "1.4.562", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.562.tgz", + "integrity": "sha512-kMGVZLP65O2/oH7zzaoIA5hcr4/xPYO6Sa83FrIpWcd7YPPtSlxqwxTd8lJIwKxaiXM6FGsYK4ukyJ40XkW7jg==", "dev": true }, "node_modules/emittery": { @@ -3593,26 +3808,26 @@ } }, "node_modules/es-abstract": { - "version": "1.22.3", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.3.tgz", - "integrity": "sha512-eiiY8HQeYfYH2Con2berK+To6GrK2RxbPawDkGq4UiCQQfZHb6wX9qQqkbpPqaxQFcl8d9QzZqo0tGE0VcrdwA==", + "version": "1.22.2", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.2.tgz", + "integrity": 
"sha512-YoxfFcDmhjOgWPWsV13+2RNjq1F6UQnfs+8TftwNqtzlmFzEXvlUwdrNrYeaizfjQzRMxkZ6ElWMOJIFKdVqwA==", "dev": true, "dependencies": { "array-buffer-byte-length": "^1.0.0", "arraybuffer.prototype.slice": "^1.0.2", "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.5", + "call-bind": "^1.0.2", "es-set-tostringtag": "^2.0.1", "es-to-primitive": "^1.2.1", "function.prototype.name": "^1.1.6", - "get-intrinsic": "^1.2.2", + "get-intrinsic": "^1.2.1", "get-symbol-description": "^1.0.0", "globalthis": "^1.0.3", "gopd": "^1.0.1", + "has": "^1.0.3", "has-property-descriptors": "^1.0.0", "has-proto": "^1.0.1", "has-symbols": "^1.0.3", - "hasown": "^2.0.0", "internal-slot": "^1.0.5", "is-array-buffer": "^3.0.2", "is-callable": "^1.2.7", @@ -3622,7 +3837,7 @@ "is-string": "^1.0.7", "is-typed-array": "^1.1.12", "is-weakref": "^1.0.2", - "object-inspect": "^1.13.1", + "object-inspect": "^1.12.3", "object-keys": "^1.1.1", "object.assign": "^4.1.4", "regexp.prototype.flags": "^1.5.1", @@ -3636,7 +3851,7 @@ "typed-array-byte-offset": "^1.0.0", "typed-array-length": "^1.0.4", "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.13" + "which-typed-array": "^1.1.11" }, "engines": { "node": ">= 0.4" @@ -3646,26 +3861,26 @@ } }, "node_modules/es-set-tostringtag": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.2.tgz", - "integrity": "sha512-BuDyupZt65P9D2D2vA/zqcI3G5xRsklm5N3xCwuiy+/vKy8i0ifdsQP1sLgO4tZDSCaQUSnmC48khknGMV3D2Q==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz", + "integrity": "sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg==", "dev": true, "dependencies": { - "get-intrinsic": "^1.2.2", - "has-tostringtag": "^1.0.0", - "hasown": "^2.0.0" + "get-intrinsic": "^1.1.3", + "has": "^1.0.3", + "has-tostringtag": "^1.0.0" }, "engines": { "node": ">= 0.4" } }, "node_modules/es-shim-unscopables": { - 
"version": "1.0.2", - "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz", - "integrity": "sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", + "integrity": "sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", "dev": true, "dependencies": { - "hasown": "^2.0.0" + "has": "^1.0.3" } }, "node_modules/es-to-primitive": { @@ -3707,19 +3922,18 @@ } }, "node_modules/eslint": { - "version": "8.52.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.52.0.tgz", - "integrity": "sha512-zh/JHnaixqHZsolRB/w9/02akBk9EPrOs9JwcTP2ek7yL5bVvXuRariiaAjjoJ5DvuwQ1WAE/HsMz+w17YgBCg==", + "version": "8.51.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.51.0.tgz", + "integrity": "sha512-2WuxRZBrlwnXi+/vFSJyjMqrNjtJqiasMzehF0shoLaW7DzS3/9Yvrmq5JiT66+pNjiX4UBnLDiKHcWAr/OInA==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", "@eslint/eslintrc": "^2.1.2", - "@eslint/js": "8.52.0", - "@humanwhocodes/config-array": "^0.11.13", + "@eslint/js": "8.51.0", + "@humanwhocodes/config-array": "^0.11.11", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", - "@ungap/structured-clone": "^1.2.0", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", @@ -3829,26 +4043,26 @@ } }, "node_modules/eslint-plugin-import": { - "version": "2.29.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.0.tgz", - "integrity": "sha512-QPOO5NO6Odv5lpoTkddtutccQjysJuFxoPS7fAHO+9m9udNHvTCPSAMW9zGAYj8lAIdr40I8yPCdUYrncXtrwg==", + "version": "2.28.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.28.1.tgz", + "integrity": 
"sha512-9I9hFlITvOV55alzoKBI+K9q74kv0iKMeY6av5+umsNwayt59fz692daGyjR+oStBQgx6nwR9rXldDev3Clw+A==", "dev": true, "dependencies": { - "array-includes": "^3.1.7", - "array.prototype.findlastindex": "^1.2.3", - "array.prototype.flat": "^1.3.2", - "array.prototype.flatmap": "^1.3.2", + "array-includes": "^3.1.6", + "array.prototype.findlastindex": "^1.2.2", + "array.prototype.flat": "^1.3.1", + "array.prototype.flatmap": "^1.3.1", "debug": "^3.2.7", "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.9", + "eslint-import-resolver-node": "^0.3.7", "eslint-module-utils": "^2.8.0", - "hasown": "^2.0.0", - "is-core-module": "^2.13.1", + "has": "^1.0.3", + "is-core-module": "^2.13.0", "is-glob": "^4.0.3", "minimatch": "^3.1.2", - "object.fromentries": "^2.0.7", - "object.groupby": "^1.0.1", - "object.values": "^1.1.7", + "object.fromentries": "^2.0.6", + "object.groupby": "^1.0.0", + "object.values": "^1.1.6", "semver": "^6.3.1", "tsconfig-paths": "^3.14.2" }, @@ -3890,21 +4104,29 @@ } }, "node_modules/eslint-plugin-prettier": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-4.2.1.tgz", - "integrity": "sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.0.1.tgz", + "integrity": "sha512-m3u5RnR56asrwV/lDC4GHorlW75DsFfmUcjfCYylTUs85dBRnB7VM6xG8eCMJdeDRnppzmxZVf1GEPJvl1JmNg==", "dev": true, "dependencies": { - "prettier-linter-helpers": "^1.0.0" + "prettier-linter-helpers": "^1.0.0", + "synckit": "^0.8.5" }, "engines": { - "node": ">=12.0.0" + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/prettier" }, "peerDependencies": { - "eslint": ">=7.28.0", - "prettier": ">=2.0.0" + "@types/eslint": ">=8.0.0", + "eslint": ">=8.0.0", + "prettier": ">=3.0.0" }, "peerDependenciesMeta": { + "@types/eslint": { + 
"optional": true + }, "eslint-config-prettier": { "optional": true } @@ -4312,15 +4534,15 @@ } }, "node_modules/get-intrinsic": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz", - "integrity": "sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.1.tgz", + "integrity": "sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw==", "dev": true, "dependencies": { - "function-bind": "^1.1.2", + "function-bind": "^1.1.1", + "has": "^1.0.3", "has-proto": "^1.0.1", - "has-symbols": "^1.0.3", - "hasown": "^2.0.0" + "has-symbols": "^1.0.3" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -4469,6 +4691,15 @@ "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", "dev": true }, + "node_modules/has": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.4.tgz", + "integrity": "sha512-qdSAmqLF6209RFj4VVItywPMbm3vWylknmB3nvNiUIs72xAimcM8nVYxYr7ncvZq5qzk9MKIZR8ijqD/1QuYjQ==", + "dev": true, + "engines": { + "node": ">= 0.4.0" + } + }, "node_modules/has-bigints": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", @@ -4488,12 +4719,12 @@ } }, "node_modules/has-property-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz", - "integrity": "sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", + "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", "dev": true, "dependencies": { - 
"get-intrinsic": "^1.2.2" + "get-intrinsic": "^1.1.1" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -4538,18 +4769,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/hasown": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", - "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", - "dev": true, - "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, "node_modules/html-escaper": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", @@ -4635,13 +4854,13 @@ "dev": true }, "node_modules/internal-slot": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.6.tgz", - "integrity": "sha512-Xj6dv+PsbtwyPpEflsejS+oIZxmMlV44zAhG479uYu89MsjcYOhCFnNyKrkJrihbsiasQyY0afoCl/9BLR65bg==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.5.tgz", + "integrity": "sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ==", "dev": true, "dependencies": { - "get-intrinsic": "^1.2.2", - "hasown": "^2.0.0", + "get-intrinsic": "^1.2.0", + "has": "^1.0.3", "side-channel": "^1.0.4" }, "engines": { @@ -4709,12 +4928,12 @@ } }, "node_modules/is-core-module": { - "version": "2.13.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", - "integrity": "sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==", + "version": "2.13.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.0.tgz", + "integrity": "sha512-Z7dk6Qo8pOCp3l4tsX2C5ZVas4V+UxwQodwZhLopL91TX8UyyHEXafPcyoeeWuLrwzHcr3igO78wNLwHJHsMCQ==", "dev": true, "dependencies": { - "hasown": "^2.0.0" + "has": "^1.0.3" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -4735,6 
+4954,21 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-docker": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-3.0.0.tgz", + "integrity": "sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==", + "dev": true, + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -4774,6 +5008,24 @@ "node": ">=0.10.0" } }, + "node_modules/is-inside-container": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-inside-container/-/is-inside-container-1.0.0.tgz", + "integrity": "sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==", + "dev": true, + "dependencies": { + "is-docker": "^3.0.0" + }, + "bin": { + "is-inside-container": "cli.js" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-negative-zero": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", @@ -4916,6 +5168,33 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "dev": true, + "dependencies": { + "is-docker": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-wsl/node_modules/is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "dev": 
true, + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/isarray": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", @@ -5952,6 +6231,24 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/open": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/open/-/open-9.1.0.tgz", + "integrity": "sha512-OS+QTnw1/4vrf+9hh1jc1jnYjzSG4ttTBB8UxOwAnInG3Uo4ssetzC1ihqaIHjLJnA5GGlRl6QlZXOTQhRBUvg==", + "dev": true, + "dependencies": { + "default-browser": "^4.0.0", + "define-lazy-prop": "^3.0.0", + "is-inside-container": "^1.0.0", + "is-wsl": "^2.2.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/optionator": { "version": "0.9.3", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", @@ -6405,6 +6702,21 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/run-applescript": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/run-applescript/-/run-applescript-5.0.0.tgz", + "integrity": "sha512-XcT5rBksx1QdIhlFOCtgZkB99ZEouFZ1E2Kc2LHqNW13U3/74YGdkQRmThTwxy4QIyookibDKYZOPqX//6BlAg==", + "dev": true, + "dependencies": { + "execa": "^5.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -6762,6 +7074,28 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/synckit": { + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.8.5.tgz", + "integrity": "sha512-L1dapNV6vu2s/4Sputv8xGsCdAVlb5nRDMFU/E27D44l5U6cw1g0dGd45uLc+OXjNMmF4ntiMdCimzcjFKQI8Q==", + "dev": true, + "dependencies": { + "@pkgr/utils": "^2.3.1", + "tslib": 
"^2.5.0" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts" + } + }, + "node_modules/synckit/node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==", + "dev": true + }, "node_modules/test-exclude": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", @@ -6782,6 +7116,18 @@ "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", "dev": true }, + "node_modules/titleize": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/titleize/-/titleize-3.0.0.tgz", + "integrity": "sha512-KxVu8EYHDPBdUYdKZdKtU2aj2XfEx9AfjXxE/Aj0vT06w2icA09Vus1rh6eSu1y01akYg6BjIK/hxyLJINoMLQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/tmpl": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", @@ -7075,6 +7421,21 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/undici-types": { + "version": "5.25.3", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.25.3.tgz", + "integrity": "sha512-Ga1jfYwRn7+cP9v8auvEXN1rX3sWqlayd4HP7OKk4mZWylEmu3KzXDUGrQUN6Ol7qo1gPvB2e5gX6udnyEPgdA==", + "dev": true + }, + "node_modules/untildify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", + "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/update-browserslist-db": { "version": "1.0.13", "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz", @@ -8328,9 +8689,9 @@ } }, 
"@eslint/js": { - "version": "8.52.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.52.0.tgz", - "integrity": "sha512-mjZVbpaeMZludF2fsWLD0Z9gCref1Tk4i9+wddjRvpUNqqcndPkBD09N/Mapey0b3jaXbLm2kICwFv2E64QinA==", + "version": "8.51.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.51.0.tgz", + "integrity": "sha512-HxjQ8Qn+4SI3/AFv6sOrDB+g6PpUTDwSJiQqOrnneEk8L71161srI9gjzzZvYVbzHiVg/BvcH95+cK/zfIt4pg==", "dev": true }, "@humanwhocodes/config-array": { @@ -8724,6 +9085,28 @@ "fastq": "^1.6.0" } }, + "@pkgr/utils": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/@pkgr/utils/-/utils-2.4.2.tgz", + "integrity": "sha512-POgTXhjrTfbTV63DiFXav4lBHiICLKKwDeaKn9Nphwj7WH6m0hMMCaJkMyRWjgtPFyRKRVoMXXjczsTQRDEhYw==", + "dev": true, + "requires": { + "cross-spawn": "^7.0.3", + "fast-glob": "^3.3.0", + "is-glob": "^4.0.3", + "open": "^9.1.0", + "picocolors": "^1.0.0", + "tslib": "^2.6.0" + }, + "dependencies": { + "tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==", + "dev": true + } + } + }, "@sinclair/typebox": { "version": "0.27.8", "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", @@ -8875,10 +9258,13 @@ "dev": true }, "@types/node": { - "version": "18.18.6", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.6.tgz", - "integrity": "sha512-wf3Vz+jCmOQ2HV1YUJuCWdL64adYxumkrxtc+H1VUQlnQI04+5HtH+qZCOE21lBE7gIrt+CwX2Wv8Acrw5Ak6w==", - "dev": true + "version": "20.8.8", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.8.8.tgz", + "integrity": "sha512-YRsdVxq6OaLfmR9Hy816IMp33xOBjfyOgUd77ehqg96CFywxAPbDbXvAsuN2KVg2HOT8Eh6uAfU+l4WffwPVrQ==", + "dev": true, + "requires": { + "undici-types": "~5.25.1" + } }, "@types/prettier": { "version": "2.7.3", @@ -9013,12 +9399,6 @@ "eslint-visitor-keys": "^3.3.0" } }, - 
"@ungap/structured-clone": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", - "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", - "dev": true - }, "acorn": { "version": "8.10.0", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", @@ -9547,6 +9927,21 @@ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "dev": true }, + "big-integer": { + "version": "1.6.51", + "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.51.tgz", + "integrity": "sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg==", + "dev": true + }, + "bplist-parser": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/bplist-parser/-/bplist-parser-0.2.0.tgz", + "integrity": "sha512-z0M+byMThzQmD9NILRniCUXYsYpjwnlO8N5uCFaCqIOpqRsJCrQL9NK3JsD67CN5a08nF5oIL2bD6loTdHOuKw==", + "dev": true, + "requires": { + "big-integer": "^1.6.44" + } + }, "brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -9602,6 +9997,15 @@ "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", "dev": true }, + "bundle-name": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bundle-name/-/bundle-name-3.0.0.tgz", + "integrity": "sha512-PKA4BeSvBpQKQ8iPOGCSiell+N8P+Tf1DlwqmYhpe2gAhKPHn8EYOxVT+ShuGmhg8lN8XiSlS80yiExKXrURlw==", + "dev": true, + "requires": { + "run-applescript": "^5.0.0" + } + }, "call-bind": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.5.tgz", @@ -9626,15 +10030,15 @@ "dev": true }, "caniuse-lite": { - "version": "1.0.30001553", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001553.tgz", - "integrity": 
"sha512-N0ttd6TrFfuqKNi+pMgWJTb9qrdJu4JSpgPFLe/lrD19ugC6fZgF0pUewRowDwzdDnb9V41mFcdlYgl/PyKf4A==", + "version": "1.0.30001551", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001551.tgz", + "integrity": "sha512-vtBAez47BoGMMzlbYhfXrMV1kvRF2WP/lqiMuDu1Sb4EE4LKEgjopFDSRtZfdVnslNRpOqV/woE+Xgrwj6VQlg==", "dev": true }, "cdk-nag": { - "version": "2.27.171", - "resolved": "https://registry.npmjs.org/cdk-nag/-/cdk-nag-2.27.171.tgz", - "integrity": "sha512-3h87and56VWB3ukOk9BgFVBt6+OgRK76UEHAjzQz8QZecsgf9qADA4Ec9lXrK1pZEyX80/4OEFo1HP8pbRBiKQ==", + "version": "2.27.167", + "resolved": "https://registry.npmjs.org/cdk-nag/-/cdk-nag-2.27.167.tgz", + "integrity": "sha512-NI7q1R2ekVCvkk9hB3bunNTUZ/Ko4s/jmanZHrNSbQ82X9y5SZ0G85n+phY716j07PUXevQ3r53cfhrioyUPaA==", "dev": true, "requires": {} }, @@ -9782,6 +10186,95 @@ "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", "dev": true }, + "default-browser": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/default-browser/-/default-browser-4.0.0.tgz", + "integrity": "sha512-wX5pXO1+BrhMkSbROFsyxUm0i/cJEScyNhA4PPxc41ICuv05ZZB/MX28s8aZx6xjmatvebIapF6hLEKEcpneUA==", + "dev": true, + "requires": { + "bundle-name": "^3.0.0", + "default-browser-id": "^3.0.0", + "execa": "^7.1.1", + "titleize": "^3.0.0" + }, + "dependencies": { + "execa": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-7.2.0.tgz", + "integrity": "sha512-UduyVP7TLB5IcAQl+OzLyLcS/l32W/GLg+AhHJ+ow40FOk2U3SAllPwR44v4vmdFwIWqpdwxxpQbF1n5ta9seA==", + "dev": true, + "requires": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.1", + "human-signals": "^4.3.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^3.0.7", + "strip-final-newline": "^3.0.0" + } + }, + "human-signals": { + "version": "4.3.1", + "resolved": 
"https://registry.npmjs.org/human-signals/-/human-signals-4.3.1.tgz", + "integrity": "sha512-nZXjEF2nbo7lIw3mgYjItAfgQXog3OjJogSbKa2CQIIvSGWcKgeJnQlNXip6NglNzYH45nSRiEVimMvYL8DDqQ==", + "dev": true + }, + "is-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + "dev": true + }, + "mimic-fn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", + "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", + "dev": true + }, + "npm-run-path": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.1.0.tgz", + "integrity": "sha512-sJOdmRGrY2sjNTRMbSvluQqg+8X7ZK61yvzBEIDhz4f8z1TZFYABsqjjCBd/0PUNE9M6QDgHJXQkGUEm7Q+l9Q==", + "dev": true, + "requires": { + "path-key": "^4.0.0" + } + }, + "onetime": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", + "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", + "dev": true, + "requires": { + "mimic-fn": "^4.0.0" + } + }, + "path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "dev": true + }, + "strip-final-newline": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", + "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", + "dev": true + } + } + }, + "default-browser-id": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/default-browser-id/-/default-browser-id-3.0.0.tgz", + "integrity": 
"sha512-OZ1y3y0SqSICtE8DE4S8YOE9UZOJ8wO16fKWVP5J1Qz42kV9jcnMVFrEE/noXb/ss3Q4pZIH79kxofzyNNtUNA==", + "dev": true, + "requires": { + "bplist-parser": "^0.2.0", + "untildify": "^4.0.0" + } + }, "define-data-property": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", @@ -9793,6 +10286,12 @@ "has-property-descriptors": "^1.0.0" } }, + "define-lazy-prop": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz", + "integrity": "sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==", + "dev": true + }, "define-properties": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", @@ -9841,9 +10340,9 @@ } }, "electron-to-chromium": { - "version": "1.4.565", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.565.tgz", - "integrity": "sha512-XbMoT6yIvg2xzcbs5hCADi0dXBh4//En3oFXmtPX+jiyyiCTiM9DGFT2SLottjpEs9Z8Mh8SqahbR96MaHfuSg==", + "version": "1.4.562", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.562.tgz", + "integrity": "sha512-kMGVZLP65O2/oH7zzaoIA5hcr4/xPYO6Sa83FrIpWcd7YPPtSlxqwxTd8lJIwKxaiXM6FGsYK4ukyJ40XkW7jg==", "dev": true }, "emittery": { @@ -9868,26 +10367,26 @@ } }, "es-abstract": { - "version": "1.22.3", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.3.tgz", - "integrity": "sha512-eiiY8HQeYfYH2Con2berK+To6GrK2RxbPawDkGq4UiCQQfZHb6wX9qQqkbpPqaxQFcl8d9QzZqo0tGE0VcrdwA==", + "version": "1.22.2", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.2.tgz", + "integrity": "sha512-YoxfFcDmhjOgWPWsV13+2RNjq1F6UQnfs+8TftwNqtzlmFzEXvlUwdrNrYeaizfjQzRMxkZ6ElWMOJIFKdVqwA==", "dev": true, "requires": { "array-buffer-byte-length": "^1.0.0", "arraybuffer.prototype.slice": "^1.0.2", "available-typed-arrays": "^1.0.5", - 
"call-bind": "^1.0.5", + "call-bind": "^1.0.2", "es-set-tostringtag": "^2.0.1", "es-to-primitive": "^1.2.1", "function.prototype.name": "^1.1.6", - "get-intrinsic": "^1.2.2", + "get-intrinsic": "^1.2.1", "get-symbol-description": "^1.0.0", "globalthis": "^1.0.3", "gopd": "^1.0.1", + "has": "^1.0.3", "has-property-descriptors": "^1.0.0", "has-proto": "^1.0.1", "has-symbols": "^1.0.3", - "hasown": "^2.0.0", "internal-slot": "^1.0.5", "is-array-buffer": "^3.0.2", "is-callable": "^1.2.7", @@ -9897,7 +10396,7 @@ "is-string": "^1.0.7", "is-typed-array": "^1.1.12", "is-weakref": "^1.0.2", - "object-inspect": "^1.13.1", + "object-inspect": "^1.12.3", "object-keys": "^1.1.1", "object.assign": "^4.1.4", "regexp.prototype.flags": "^1.5.1", @@ -9911,27 +10410,27 @@ "typed-array-byte-offset": "^1.0.0", "typed-array-length": "^1.0.4", "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.13" + "which-typed-array": "^1.1.11" } }, "es-set-tostringtag": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.2.tgz", - "integrity": "sha512-BuDyupZt65P9D2D2vA/zqcI3G5xRsklm5N3xCwuiy+/vKy8i0ifdsQP1sLgO4tZDSCaQUSnmC48khknGMV3D2Q==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz", + "integrity": "sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg==", "dev": true, "requires": { - "get-intrinsic": "^1.2.2", - "has-tostringtag": "^1.0.0", - "hasown": "^2.0.0" + "get-intrinsic": "^1.1.3", + "has": "^1.0.3", + "has-tostringtag": "^1.0.0" } }, "es-shim-unscopables": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz", - "integrity": "sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", + "integrity": 
"sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", "dev": true, "requires": { - "hasown": "^2.0.0" + "has": "^1.0.3" } }, "es-to-primitive": { @@ -9958,19 +10457,18 @@ "dev": true }, "eslint": { - "version": "8.52.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.52.0.tgz", - "integrity": "sha512-zh/JHnaixqHZsolRB/w9/02akBk9EPrOs9JwcTP2ek7yL5bVvXuRariiaAjjoJ5DvuwQ1WAE/HsMz+w17YgBCg==", + "version": "8.51.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.51.0.tgz", + "integrity": "sha512-2WuxRZBrlwnXi+/vFSJyjMqrNjtJqiasMzehF0shoLaW7DzS3/9Yvrmq5JiT66+pNjiX4UBnLDiKHcWAr/OInA==", "dev": true, "requires": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", "@eslint/eslintrc": "^2.1.2", - "@eslint/js": "8.52.0", - "@humanwhocodes/config-array": "^0.11.13", + "@eslint/js": "8.51.0", + "@humanwhocodes/config-array": "^0.11.11", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", - "@ungap/structured-clone": "^1.2.0", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", @@ -10078,26 +10576,26 @@ "requires": {} }, "eslint-plugin-import": { - "version": "2.29.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.0.tgz", - "integrity": "sha512-QPOO5NO6Odv5lpoTkddtutccQjysJuFxoPS7fAHO+9m9udNHvTCPSAMW9zGAYj8lAIdr40I8yPCdUYrncXtrwg==", + "version": "2.28.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.28.1.tgz", + "integrity": "sha512-9I9hFlITvOV55alzoKBI+K9q74kv0iKMeY6av5+umsNwayt59fz692daGyjR+oStBQgx6nwR9rXldDev3Clw+A==", "dev": true, "requires": { - "array-includes": "^3.1.7", - "array.prototype.findlastindex": "^1.2.3", - "array.prototype.flat": "^1.3.2", - "array.prototype.flatmap": "^1.3.2", + "array-includes": "^3.1.6", + "array.prototype.findlastindex": "^1.2.2", + "array.prototype.flat": "^1.3.1", + "array.prototype.flatmap": "^1.3.1", "debug": 
"^3.2.7", "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.9", + "eslint-import-resolver-node": "^0.3.7", "eslint-module-utils": "^2.8.0", - "hasown": "^2.0.0", - "is-core-module": "^2.13.1", + "has": "^1.0.3", + "is-core-module": "^2.13.0", "is-glob": "^4.0.3", "minimatch": "^3.1.2", - "object.fromentries": "^2.0.7", - "object.groupby": "^1.0.1", - "object.values": "^1.1.7", + "object.fromentries": "^2.0.6", + "object.groupby": "^1.0.0", + "object.values": "^1.1.6", "semver": "^6.3.1", "tsconfig-paths": "^3.14.2" }, @@ -10129,12 +10627,13 @@ } }, "eslint-plugin-prettier": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-4.2.1.tgz", - "integrity": "sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.0.1.tgz", + "integrity": "sha512-m3u5RnR56asrwV/lDC4GHorlW75DsFfmUcjfCYylTUs85dBRnB7VM6xG8eCMJdeDRnppzmxZVf1GEPJvl1JmNg==", "dev": true, "requires": { - "prettier-linter-helpers": "^1.0.0" + "prettier-linter-helpers": "^1.0.0", + "synckit": "^0.8.5" } }, "eslint-scope": { @@ -10422,15 +10921,15 @@ "dev": true }, "get-intrinsic": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz", - "integrity": "sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.1.tgz", + "integrity": "sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw==", "dev": true, "requires": { - "function-bind": "^1.1.2", + "function-bind": "^1.1.1", + "has": "^1.0.3", "has-proto": "^1.0.1", - "has-symbols": "^1.0.3", - "hasown": "^2.0.0" + "has-symbols": "^1.0.3" } }, "get-package-type": { @@ -10531,6 +11030,12 @@ "integrity": 
"sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", "dev": true }, + "has": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.4.tgz", + "integrity": "sha512-qdSAmqLF6209RFj4VVItywPMbm3vWylknmB3nvNiUIs72xAimcM8nVYxYr7ncvZq5qzk9MKIZR8ijqD/1QuYjQ==", + "dev": true + }, "has-bigints": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", @@ -10544,12 +11049,12 @@ "dev": true }, "has-property-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz", - "integrity": "sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", + "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", "dev": true, "requires": { - "get-intrinsic": "^1.2.2" + "get-intrinsic": "^1.1.1" } }, "has-proto": { @@ -10573,15 +11078,6 @@ "has-symbols": "^1.0.2" } }, - "hasown": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", - "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", - "dev": true, - "requires": { - "function-bind": "^1.1.2" - } - }, "html-escaper": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", @@ -10643,13 +11139,13 @@ "dev": true }, "internal-slot": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.6.tgz", - "integrity": "sha512-Xj6dv+PsbtwyPpEflsejS+oIZxmMlV44zAhG479uYu89MsjcYOhCFnNyKrkJrihbsiasQyY0afoCl/9BLR65bg==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.5.tgz", + "integrity": 
"sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ==", "dev": true, "requires": { - "get-intrinsic": "^1.2.2", - "hasown": "^2.0.0", + "get-intrinsic": "^1.2.0", + "has": "^1.0.3", "side-channel": "^1.0.4" } }, @@ -10696,12 +11192,12 @@ "dev": true }, "is-core-module": { - "version": "2.13.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", - "integrity": "sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==", + "version": "2.13.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.0.tgz", + "integrity": "sha512-Z7dk6Qo8pOCp3l4tsX2C5ZVas4V+UxwQodwZhLopL91TX8UyyHEXafPcyoeeWuLrwzHcr3igO78wNLwHJHsMCQ==", "dev": true, "requires": { - "hasown": "^2.0.0" + "has": "^1.0.3" } }, "is-date-object": { @@ -10713,6 +11209,12 @@ "has-tostringtag": "^1.0.0" } }, + "is-docker": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-3.0.0.tgz", + "integrity": "sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==", + "dev": true + }, "is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -10740,6 +11242,15 @@ "is-extglob": "^2.1.1" } }, + "is-inside-container": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-inside-container/-/is-inside-container-1.0.0.tgz", + "integrity": "sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==", + "dev": true, + "requires": { + "is-docker": "^3.0.0" + } + }, "is-negative-zero": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", @@ -10828,6 +11339,23 @@ "call-bind": "^1.0.2" } }, + "is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": 
"sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "dev": true, + "requires": { + "is-docker": "^2.0.0" + }, + "dependencies": { + "is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "dev": true + } + } + }, "isarray": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", @@ -11635,6 +12163,18 @@ "mimic-fn": "^2.1.0" } }, + "open": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/open/-/open-9.1.0.tgz", + "integrity": "sha512-OS+QTnw1/4vrf+9hh1jc1jnYjzSG4ttTBB8UxOwAnInG3Uo4ssetzC1ihqaIHjLJnA5GGlRl6QlZXOTQhRBUvg==", + "dev": true, + "requires": { + "default-browser": "^4.0.0", + "define-lazy-prop": "^3.0.0", + "is-inside-container": "^1.0.0", + "is-wsl": "^2.2.0" + } + }, "optionator": { "version": "0.9.3", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", @@ -11937,6 +12477,15 @@ "glob": "^7.1.3" } }, + "run-applescript": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/run-applescript/-/run-applescript-5.0.0.tgz", + "integrity": "sha512-XcT5rBksx1QdIhlFOCtgZkB99ZEouFZ1E2Kc2LHqNW13U3/74YGdkQRmThTwxy4QIyookibDKYZOPqX//6BlAg==", + "dev": true, + "requires": { + "execa": "^5.0.0" + } + }, "run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -12197,6 +12746,24 @@ "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", "dev": true }, + "synckit": { + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.8.5.tgz", + "integrity": "sha512-L1dapNV6vu2s/4Sputv8xGsCdAVlb5nRDMFU/E27D44l5U6cw1g0dGd45uLc+OXjNMmF4ntiMdCimzcjFKQI8Q==", + "dev": true, + "requires": { + "@pkgr/utils": "^2.3.1", + "tslib": "^2.5.0" 
+ }, + "dependencies": { + "tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==", + "dev": true + } + } + }, "test-exclude": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", @@ -12214,6 +12781,12 @@ "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", "dev": true }, + "titleize": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/titleize/-/titleize-3.0.0.tgz", + "integrity": "sha512-KxVu8EYHDPBdUYdKZdKtU2aj2XfEx9AfjXxE/Aj0vT06w2icA09Vus1rh6eSu1y01akYg6BjIK/hxyLJINoMLQ==", + "dev": true + }, "tmpl": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", @@ -12402,6 +12975,18 @@ "which-boxed-primitive": "^1.0.2" } }, + "undici-types": { + "version": "5.25.3", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.25.3.tgz", + "integrity": "sha512-Ga1jfYwRn7+cP9v8auvEXN1rX3sWqlayd4HP7OKk4mZWylEmu3KzXDUGrQUN6Ol7qo1gPvB2e5gX6udnyEPgdA==", + "dev": true + }, + "untildify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", + "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", + "dev": true + }, "update-browserslist-db": { "version": "1.0.13", "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz", diff --git a/source/package.json b/source/package.json index c29ee8b8..3b2cf64a 100644 --- a/source/package.json +++ b/source/package.json @@ -1,6 +1,6 @@ { "name": "aws-security-hub-automated-response-and-remediation", - "version": "2.0.2", + "version": "2.1.0", "description": "Automated remediation for AWS Security Hub (SO0111)", "bin": { "solution_deploy": "bin/solution_deploy.js" @@ -22,7 
+22,7 @@ "@cdklabs/cdk-ssm-documents": "^0.0.40", "@types/jest": "^29.4.0", "@types/js-yaml": "^4.0.5", - "@types/node": "^18.11.18", + "@types/node": "^20.1.0", "@types/prettier": "^2.7.2", "@typescript-eslint/eslint-plugin": "^5.49.0", "aws-cdk": "^2.88.0", @@ -33,7 +33,7 @@ "eslint-config-prettier": "^8.6.0", "eslint-plugin-header": "^3.1.1", "eslint-plugin-import": "^2.27.5", - "eslint-plugin-prettier": "^4.2.1", + "eslint-plugin-prettier": "^5.0.0", "jest": "^29.4.0", "js-yaml": "^4.1.0", "source-map-support": "^0.5.21", diff --git a/source/playbooks/AFSBP/README.md b/source/playbooks/AFSBP/README.md index e437d88c..0f417731 100644 --- a/source/playbooks/AFSBP/README.md +++ b/source/playbooks/AFSBP/README.md @@ -1,6 +1,6 @@ # AWS Foundational Security Best Practices v1.0.0 Playbook -The AWS Foundational Security Best Practices (AFSBP) playbook is part of the AWS Security Hub Automated Response and Remediation solution. It creates the necessary AWS resources for remediating the following Controls: +The AWS Foundational Security Best Practices (AWS FSBP) playbook is part of the AWS Security Hub Automated Response and Remediation solution. It creates the necessary AWS resources for remediating the following Controls: * AutoScaling.1 * CloudTrail.1 @@ -25,6 +25,7 @@ The AWS Foundational Security Best Practices (AFSBP) playbook is part of the AWS * S3.3 * S3.4 * S3.5 +* S3.9 See the [AWS Security Hub Automated Response and Remediation Implementation Guide](https://docs.aws.amazon.com/solutions/latest/aws-security-hub-automated-response-and-remediation/welcome.html) for more information on this Playbook. 
diff --git a/source/playbooks/AFSBP/bin/afsbp.ts b/source/playbooks/AFSBP/bin/afsbp.ts index 929291a2..ae7e5c39 100644 --- a/source/playbooks/AFSBP/bin/afsbp.ts +++ b/source/playbooks/AFSBP/bin/afsbp.ts @@ -26,17 +26,27 @@ cdk.Aspects.of(app).add(new cdk_nag.AwsSolutionsChecks()); const remediations: IControl[] = [ { control: 'AutoScaling.1' }, { control: 'CloudFormation.1' }, + { control: 'CloudFront.1' }, + { control: 'CloudFront.12' }, { control: 'CloudTrail.1' }, { control: 'CloudTrail.2' }, { control: 'CloudTrail.4' }, { control: 'CloudTrail.5' }, { control: 'CodeBuild.2' }, + { control: 'CodeBuild.5' }, { control: 'Config.1' }, { control: 'EC2.1' }, { control: 'EC2.2' }, + { control: 'EC2.4' }, { control: 'EC2.6' }, { control: 'EC2.7' }, + { control: 'EC2.8' }, { control: 'EC2.15' }, + { control: 'EC2.18' }, + { control: 'EC2.19' }, + { control: 'EC2.23' }, + { control: 'ECR.1' }, + { control: 'GuardDuty.1' }, { control: 'IAM.3' }, { control: 'IAM.7' }, { control: 'IAM.8' }, @@ -67,9 +77,16 @@ const remediations: IControl[] = [ control: 'S3.8', executes: 'S3.2', }, + { control: 'S3.9' }, + { control: 'S3.11' }, + { control: 'S3.13' }, + { control: 'SecretsManager.1' }, + { control: 'SecretsManager.3' }, + { control: 'SecretsManager.4' }, { control: 'SNS.1' }, { control: 'SNS.2' }, { control: 'SQS.1' }, + { control: 'SSM.4' }, ]; const adminStack = new PlaybookPrimaryStack(app, 'AFSBPStack', { diff --git a/source/playbooks/AFSBP/description.txt b/source/playbooks/AFSBP/description.txt index c956e4dc..20ce2d4a 100644 --- a/source/playbooks/AFSBP/description.txt +++ b/source/playbooks/AFSBP/description.txt @@ -1,3 +1,3 @@ AWS Security Hub Automated Response and Remediation is an add-on solution that enables AWS Security Hub customers to remediate security findings with a single click -using predefined response and remediation actions packaged in “Playbooks”. 
This Playbook, AFSBP v1.0.0, includes remediations for many of the AWS Foundational Security Best Practices v1.0.0 findings. +using predefined response and remediation actions packaged in “Playbooks”. This Playbook, AWS FSBP v1.0.0, includes remediations for many of the AWS Foundational Security Best Practices v1.0.0 findings. diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_AutoScaling.1.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_AutoScaling.1.yaml index b18ab198..98dc5666 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_AutoScaling.1.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_AutoScaling.1.yaml @@ -17,7 +17,7 @@ description: | * Remediation.Output ## Documentation Links - * [AFSBP AutoScaling.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-autoscaling-1) + * [AWS FSBP AutoScaling.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-autoscaling-1) schemaVersion: '0.3' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudFormation.1.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudFormation.1.yaml index 82b78186..4830780a 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudFormation.1.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudFormation.1.yaml @@ -15,7 +15,7 @@ description: | * Remediation.Output ## Documentation Links - * [AFSBP v1.0.0 CloudFormation.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudformation-1) + * [AWS FSBP v1.0.0 CloudFormation.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudformation-1) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' parameters: diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudFront.1.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudFront.1.yaml new file mode 100644 index 00000000..a3ae0318 --- /dev/null +++ 
b/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudFront.1.yaml @@ -0,0 +1,85 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-AFSBP_1.0.0_CloudFront.1 + + ## What does this document do? + This document configures a default root object to be returned when visiting a CloudFront distribution. + + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + + ## Output Parameters + * Remediation.Output + + ## Documentation Links + * [AWS FSBP v1.0.0 CloudFront.1](https://docs.aws.amazon.com/securityhub/latest/userguide/cloudfront-controls.html#cloudfront-1) +schemaVersion: '0.3' +assumeRole: '{{ AutomationAssumeRole }}' +parameters: + AutomationAssumeRole: + type: 'String' + description: '(Required) The ARN of the role that allows Automation to perform the actions on your behalf.' 
+ allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + Finding: + type: 'StringMap' + description: 'The input from the Orchestrator Step function for the CloudFront.1 finding' +outputs: +- 'ParseInput.AffectedObject' +- 'Remediation.Output' +mainSteps: +- name: 'ParseInput' + action: 'aws:executeScript' + outputs: + - Name: 'FindingId' + Selector: '$.Payload.finding.Id' + Type: 'String' + - Name: 'ProductArn' + Selector: '$.Payload.finding.ProductArn' + Type: 'String' + - Name: 'AffectedObject' + Selector: '$.Payload.object' + Type: 'StringMap' + - Name: 'CloudFrontDistribution' + Selector: '$.Payload.resource_id' + Type: 'String' + - Name: 'RemediationRegion' + Selector: '$.Payload.resource_region' + Type: 'String' + - Name: 'RemediationAccount' + Selector: '$.Payload.account_id' + Type: 'String' + inputs: + InputPayload: + Finding: '{{Finding}}' + parse_id_pattern: '^(arn:(?:aws|aws-us-gov|aws-cn):cloudfront::\d{12}:distribution\/([A-Z0-9]+))$' + expected_control_id: + - 'CloudFront.1' + Runtime: 'python3.8' + Handler: 'parse_event' + Script: |- + %%SCRIPT=common/parse_input.py%% +- name: 'Remediation' + action: 'aws:executeAutomation' + isEnd: false + inputs: + DocumentName: ASR-EnableCloudFrontDefaultRootObject + RuntimeParameters: + AutomationAssumeRole: 'arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/SO0111-EnableCloudFrontDefaultRootObject' + CloudFrontDistribution: '{{ ParseInput.CloudFrontDistribution }}' +- name: 'UpdateFinding' + action: 'aws:executeAwsApi' + inputs: + Service: 'securityhub' + Api: 'BatchUpdateFindings' + FindingIdentifiers: + - Id: '{{ ParseInput.FindingId }}' + ProductArn: '{{ ParseInput.ProductArn }}' + Note: + Text: 'Configured default root object for CloudFront distribution' + UpdatedBy: 'ASR-AFSBP_1.0.0_CloudFront.1' + Workflow: + Status: 'RESOLVED' + isEnd: true diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudFront.12.yaml 
b/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudFront.12.yaml new file mode 100644 index 00000000..2bb76b5f --- /dev/null +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudFront.12.yaml @@ -0,0 +1,85 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-AFSBP_1.0.0_CloudFront.12 + + ## What does this document do? + This document updates the origin domain on a given CloudFront distribution to prevent a malicious third party from creating the referenced bucket and serving their own content through your distribution. + + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + + ## Output Parameters + * Remediation.Output + + ## Documentation Links + * [AWS FSBP v1.0.0 CloudFront.12](https://docs.aws.amazon.com/securityhub/latest/userguide/cloudfront-controls.html#cloudfront-12) +schemaVersion: '0.3' +assumeRole: '{{ AutomationAssumeRole }}' +parameters: + AutomationAssumeRole: + type: 'String' + description: '(Required) The ARN of the role that allows Automation to perform the actions on your behalf.' 
+ allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + Finding: + type: 'StringMap' + description: 'The input from the Orchestrator Step function for the CloudFront.12 finding' +outputs: +- 'ParseInput.AffectedObject' +- 'Remediation.Output' +mainSteps: +- name: 'ParseInput' + action: 'aws:executeScript' + outputs: + - Name: 'FindingId' + Selector: '$.Payload.finding.Id' + Type: 'String' + - Name: 'ProductArn' + Selector: '$.Payload.finding.ProductArn' + Type: 'String' + - Name: 'AffectedObject' + Selector: '$.Payload.object' + Type: 'StringMap' + - Name: 'DistributionId' + Selector: '$.Payload.resource_id' + Type: 'String' + - Name: 'RemediationRegion' + Selector: '$.Payload.resource_region' + Type: 'String' + - Name: 'RemediationAccount' + Selector: '$.Payload.account_id' + Type: 'String' + inputs: + InputPayload: + Finding: '{{Finding}}' + parse_id_pattern: '^arn:(?:aws|aws-cn|aws-us-gov):cloudfront::[0-9]{12}:distribution\/([A-Z0-9]*)$' + expected_control_id: + - 'CloudFront.12' + Runtime: 'python3.8' + Handler: 'parse_event' + Script: |- + %%SCRIPT=common/parse_input.py%% +- name: 'Remediation' + action: 'aws:executeAutomation' + isEnd: false + inputs: + DocumentName: ASR-SetCloudFrontOriginDomain + RuntimeParameters: + AutomationAssumeRole: 'arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/SO0111-SetCloudFrontOriginDomain' + DistributionId: '{{ ParseInput.DistributionId }}' +- name: 'UpdateFinding' + action: 'aws:executeAwsApi' + inputs: + Service: 'securityhub' + Api: 'BatchUpdateFindings' + FindingIdentifiers: + - Id: '{{ ParseInput.FindingId }}' + ProductArn: '{{ ParseInput.ProductArn }}' + Note: + Text: 'Configured origin domain for CloudFront distribution' + UpdatedBy: 'ASR-AFSBP_1.0.0_CloudFront.12' + Workflow: + Status: 'RESOLVED' + isEnd: true diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudTrail.1.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudTrail.1.yaml index 184fd049..f5306630 100644 --- 
a/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudTrail.1.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudTrail.1.yaml @@ -12,7 +12,7 @@ description: | * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. ## Documentation Links - * [AFSBP CloudTrail.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-1) + * [AWS FSBP CloudTrail.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-1) schemaVersion: "0.3" assumeRole: "{{ AutomationAssumeRole }}" diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudTrail.2.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudTrail.2.yaml index e6e7c8cf..26909c4b 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudTrail.2.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudTrail.2.yaml @@ -12,7 +12,7 @@ description: | * Remediation.Output - Output from the remediation ## Documentation Links - * [AFSBP CloudTrail.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-2) + * [AWS FSBP CloudTrail.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-2) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' @@ -32,6 +32,10 @@ parameters: default: >- {{ssm:/Solutions/SO0111/CMK_REMEDIATION_ARN}} allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:(?:(?:alias/[A-Za-z0-9/-_])|(?:key/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})))$' + RemediationRoleName: + type: String + default: "SO0111-EnableCloudTrailEncryption" + allowedPattern: '^[\w+=,.@-]+$' mainSteps: - @@ -53,6 +57,12 @@ mainSteps: - Name: TrailRegion Selector: $.Payload.resource_region Type: String + - Name: RemediationRegion + Selector: $.Payload.resource_region + Type: String + - Name: RemediationAccount + Selector: 
$.Payload.account_id + Type: String inputs: InputPayload: Finding: '{{Finding}}' @@ -69,11 +79,14 @@ mainSteps: action: 'aws:executeAutomation' inputs: DocumentName: ASR-EnableCloudTrailEncryption + TargetLocations: + - Accounts: [ '{{ParseInput.RemediationAccount}}' ] + Regions: [ '{{ParseInput.RemediationRegion}}' ] + ExecutionRoleName: '{{RemediationRoleName}}' RuntimeParameters: TrailRegion: '{{ParseInput.TrailRegion}}' TrailArn: '{{ParseInput.TrailArn}}' AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/SO0111-EnableCloudTrailEncryption' - KMSKeyArn: '{{KMSKeyArn}}' - name: UpdateFinding action: 'aws:executeAwsApi' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudTrail.4.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudTrail.4.yaml index 3193afc9..809f6037 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudTrail.4.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudTrail.4.yaml @@ -15,7 +15,7 @@ description: | * Remediation.Output ## Documentation Links - * [AFSBP v1.0.0 CloudTrail.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-4) + * [AWS FSBP v1.0.0 CloudTrail.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-4) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudTrail.5.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudTrail.5.yaml index 447374dd..2b4421c7 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudTrail.5.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_CloudTrail.5.yaml @@ -15,7 +15,7 @@ description: | * Remediation.Output - Remediation results ## Documentation Links - * [AFSBP v1.0.0 CloudTrail.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-5) + * [AWS FSBP v1.0.0 
CloudTrail.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-5) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_CodeBuild.2.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_CodeBuild.2.yaml index 88b3cb18..59f29c46 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_CodeBuild.2.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_CodeBuild.2.yaml @@ -15,7 +15,7 @@ description: | * Remediation.Output ## Documentation Links - * [AFSBP v1.0.0 CodeBuild.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-codebuild-2) + * [AWS FSBP v1.0.0 CodeBuild.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-codebuild-2) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' outputs: diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_CodeBuild.5.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_CodeBuild.5.yaml new file mode 100644 index 00000000..7e7562bc --- /dev/null +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_CodeBuild.5.yaml @@ -0,0 +1,92 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-AFSBP_1.0.0_CodeBuild.5 + + ## What does this document do? + This document removes CodeBuild project privileged mode to remove a build project's Docker container access to all devices. + + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ + ## Output Parameters + * Remediation.Output + + ## Documentation Links + * [AWS FSBP v1.0.0 CodeBuild.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-codebuild-5) +schemaVersion: '0.3' +assumeRole: '{{ AutomationAssumeRole }}' +outputs: + - ParseInput.AffectedObject + - Remediation.Output +parameters: + Finding: + type: StringMap + description: The input from the Orchestrator Step function for the CodeBuild.5 finding + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + RemediationRoleName: + type: String + default: "SO0111-RemoveCodeBuildPrivilegedMode" + allowedPattern: '^[\w+=,.@-]+$' +mainSteps: + - name: ParseInput + action: 'aws:executeScript' + outputs: + - Name: ProjectName + Selector: $.Payload.resource_id + Type: String + - Name: FindingId + Selector: $.Payload.finding_id + Type: String + - Name: ProductArn + Selector: $.Payload.product_arn + Type: String + - Name: AffectedObject + Selector: $.Payload.object + Type: StringMap + - Name: RemediationRegion + Selector: $.Payload.resource_region + Type: String + - Name: RemediationAccount + Selector: $.Payload.account_id + Type: String + inputs: + InputPayload: + Finding: '{{Finding}}' + parse_id_pattern: '^arn:(?:aws|aws-cn|aws-us-gov):codebuild:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:project/([A-Za-z0-9][A-Za-z0-9\-_]{1,254})$' + expected_control_id: [ 'CodeBuild.5' ] + Runtime: python3.8 + Handler: parse_event + Script: |- + %%SCRIPT=common/parse_input.py%% + - name: Remediation + action: 'aws:executeAutomation' + inputs: + DocumentName: ASR-RemoveCodeBuildPrivilegedMode + TargetLocations: + - Accounts: [ '{{ParseInput.RemediationAccount}}' ] + Regions: [ '{{ParseInput.RemediationRegion}}' ] + ExecutionRoleName: '{{RemediationRoleName}}' + RuntimeParameters: + ProjectName: 
'{{ ParseInput.ProjectName }}' + AutomationAssumeRole: 'arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/SO0111-RemoveCodeBuildPrivilegedMode' + - name: UpdateFinding + action: 'aws:executeAwsApi' + inputs: + Service: securityhub + Api: BatchUpdateFindings + FindingIdentifiers: + - Id: '{{ ParseInput.FindingId }}' + ProductArn: '{{ ParseInput.ProductArn }}' + Note: + Text: 'Removed CodeBuild privileged status.' + UpdatedBy: 'ASR-AFSBP_1.0.0_CodeBuild.5' + Workflow: + Status: RESOLVED + description: Update finding + isEnd: true diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_Config.1.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_Config.1.yaml index 7e8573bd..2c2120e1 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_Config.1.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_Config.1.yaml @@ -16,7 +16,7 @@ description: | * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. ## Documentation Links - * [AFSBP Config.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-config-1) + * [AWS FSBP Config.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-config-1) schemaVersion: "0.3" assumeRole: "{{ AutomationAssumeRole }}" diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.1.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.1.yaml index 254e4c69..52ab5783 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.1.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.1.yaml @@ -11,7 +11,7 @@ description: | * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
## Documentation Links - * [AFSBP EC2.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-1) + * [AWS FSBP EC2.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-1) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.15.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.15.yaml index 0842d424..377aed58 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.15.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.15.yaml @@ -16,7 +16,7 @@ description: | * Remediation.Output ## Documentation Links - * [AFSBP v1.0.0 EC2.15](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-15) + * [AWS FSBP v1.0.0 EC2.15](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-15) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' parameters: diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.18.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.18.yaml new file mode 100644 index 00000000..3a553944 --- /dev/null +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.18.yaml @@ -0,0 +1,125 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-AFSBP_1.0.0_EC2.18 + + ## What does this document do? + This document revokes inbound security group rules that allow unrestricted access to ports that are not authorized. + Authorized ports are listed in authorizedTcpPorts and authorizedUdpPorts parameters. + + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. + + ## Output Parameters + * Remediation.Output + + ## Documentation Links + * [AWS FSBP v1.0.0 EC2.18](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-18) +schemaVersion: '0.3' +assumeRole: '{{ AutomationAssumeRole }}' +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + Finding: + type: StringMap + description: The input from the Orchestrator Step function for the EC2.18 finding + RemediationRoleName: + type: 'String' + default: 'SO0111-RevokeUnauthorizedInboundRules' + allowedPattern: '^[\w+=,.@-]+$' + +outputs: + - ParseInput.AffectedObject + - Remediation.Output + +mainSteps: + - name: ParseInput + action: 'aws:executeScript' + outputs: + - Name: FindingId + Selector: $.Payload.finding.Id + Type: String + - Name: ProductArn + Selector: $.Payload.finding.ProductArn + Type: String + - Name: AffectedObject + Selector: $.Payload.object + Type: StringMap + - Name: SecurityGroupId + Selector: $.Payload.resource_id + Type: String + - Name: RemediationRegion + Selector: $.Payload.resource_region + Type: String + - Name: RemediationAccount + Selector: $.Payload.account_id + Type: String + - Name: SecHubInputParams + Selector: $.Payload.input_params + Type: StringMap + inputs: + InputPayload: + Finding: '{{Finding}}' + parse_id_pattern: '^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:security-group/(sg-[0-9a-f]*)$' + expected_control_id: + - 'EC2.18' + Runtime: python3.8 + Handler: parse_event + Script: |- + %%SCRIPT=common/parse_input.py%% + + - name: GetInputParams + action: 'aws:executeScript' + outputs: + - Name: authorizedUdpPorts + Selector: $.Payload.authorizedUdpPorts + Type: StringList + - Name: 
authorizedTcpPorts + Selector: $.Payload.authorizedTcpPorts + Type: StringList + inputs: + InputPayload: + SecHubInputParams: '{{ParseInput.SecHubInputParams}}' + DefaultParams: { + "authorizedTcpPorts": [ "80", "443" ], + "authorizedUdpPorts": [], + } + expected_control_id: [ 'EC2.18' ] + Runtime: python3.8 + Handler: get_input_params + Script: |- + %%SCRIPT=common/get_input_params.py%% + + - name: Remediation + action: 'aws:executeAutomation' + inputs: + DocumentName: ASR-RevokeUnauthorizedInboundRules + TargetLocations: + - Accounts: [ '{{ParseInput.RemediationAccount}}' ] + Regions: [ '{{ParseInput.RemediationRegion}}' ] + ExecutionRoleName: '{{ RemediationRoleName }}' + RuntimeParameters: + AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/{{RemediationRoleName}}' + SecurityGroupId: '{{ParseInput.SecurityGroupId}}' + AuthorizedTcpPorts: '{{GetInputParams.authorizedTcpPorts}}' + AuthorizedUdpPorts: '{{GetInputParams.authorizedUdpPorts}}' + + - name: UpdateFinding + action: 'aws:executeAwsApi' + inputs: + Service: securityhub + Api: BatchUpdateFindings + FindingIdentifiers: + - Id: '{{ParseInput.FindingId}}' + ProductArn: '{{ParseInput.ProductArn}}' + Note: + Text: 'Revoked unrestricted inbound security group rules on unauthorized ports.' + UpdatedBy: 'ASR-AFSBP_1.0.0_EC2.18' + Workflow: + Status: RESOLVED + description: Update finding \ No newline at end of file diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.19.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.19.yaml new file mode 100644 index 00000000..8fee3f44 --- /dev/null +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.19.yaml @@ -0,0 +1,98 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-AFSBP_1.0.0_EC2.19 + + ## What does this document do? + This document disables unrestricted access to high risk ports. 
+ + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. + + ## Output Parameters + * Remediation.Output + + ## Documentation Links + * [AWS FSBP v1.0.0 EC2.19](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-19) +schemaVersion: '0.3' +assumeRole: '{{ AutomationAssumeRole }}' +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + Finding: + type: StringMap + description: The input from the Orchestrator Step function for the EC2.19 finding + RemediationRoleName: + type: 'String' + default: 'SO0111-DisableUnrestrictedAccessToHighRiskPorts' + allowedPattern: '^[\w+=,.@-]+$' + +outputs: + - ParseInput.AffectedObject + - Remediation.Output + +mainSteps: + - + name: ParseInput + action: 'aws:executeScript' + outputs: + - Name: FindingId + Selector: $.Payload.finding.Id + Type: String + - Name: ProductArn + Selector: $.Payload.finding.ProductArn + Type: String + - Name: AffectedObject + Selector: $.Payload.object + Type: StringMap + - Name: SecurityGroupId + Selector: $.Payload.resource_id + Type: String + - Name: RemediationRegion + Selector: $.Payload.resource_region + Type: String + - Name: RemediationAccount + Selector: $.Payload.account_id + Type: String + inputs: + InputPayload: + Finding: '{{Finding}}' + parse_id_pattern: '^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:security-group/(sg-[0-9a-f]*)$' + expected_control_id: + - 'EC2.19' + Runtime: python3.8 + Handler: parse_event + Script: |- + %%SCRIPT=common/parse_input.py%% + - + name: Remediation + action: 
'aws:executeAutomation' + inputs: + DocumentName: ASR-DisableUnrestrictedAccessToHighRiskPorts + TargetLocations: + - Accounts: [ '{{ParseInput.RemediationAccount}}' ] + Regions: [ '{{ParseInput.RemediationRegion}}' ] + ExecutionRoleName: '{{ RemediationRoleName }}' + RuntimeParameters: + AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/{{RemediationRoleName}}' + SecurityGroupId: '{{ParseInput.SecurityGroupId}}' + - + name: UpdateFinding + action: 'aws:executeAwsApi' + inputs: + Service: securityhub + Api: BatchUpdateFindings + FindingIdentifiers: + - Id: '{{ParseInput.FindingId}}' + ProductArn: '{{ParseInput.ProductArn}}' + Note: + Text: 'Disabled unrestricted access to high risk ports.' + UpdatedBy: 'ASR-AFSBP_1.0.0_EC2.19' + Workflow: + Status: RESOLVED + description: Update finding diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.2.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.2.yaml index b16ef16a..2d0ea8e5 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.2.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.2.yaml @@ -16,7 +16,7 @@ description: | * Remediation.Output - Output from AWSConfigRemediation-RemoveVPCDefaultSecurityGroupRules SSM doc ## Documentation Links - * [AFSBP EC2.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-2) + * [AWS FSBP EC2.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-2) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.23.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.23.yaml new file mode 100644 index 00000000..f0c66823 --- /dev/null +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.23.yaml @@ -0,0 +1,98 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-AFSBP_1.0.0_EC2.23 + + ## What does this document do? + This document turns off AutoAcceptSharedAttachments on a transit gateway to ensure that only authorized VPC attachment requests are accepted. + + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. + + ## Output Parameters + * Remediation.Output + + ## Documentation Links + * [AWS FSBP v1.0.0 EC2.23](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-23) +schemaVersion: '0.3' +assumeRole: '{{ AutomationAssumeRole }}' +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + Finding: + type: StringMap + description: The input from the Orchestrator Step function for the EC2.23 finding + RemediationRoleName: + type: 'String' + default: 'SO0111-DisableTGWAutoAcceptSharedAttachments' + allowedPattern: '^[\w+=,.@-]+$' + +outputs: + - ParseInput.AffectedObject + - Remediation.Output + +mainSteps: + - + name: ParseInput + action: 'aws:executeScript' + outputs: + - Name: FindingId + Selector: $.Payload.finding.Id + Type: String + - Name: ProductArn + Selector: $.Payload.finding.ProductArn + Type: String + - Name: AffectedObject + Selector: $.Payload.object + Type: StringMap + - Name: TransitGatewayId + Selector: $.Payload.resource_id + Type: String + - Name: RemediationRegion + Selector: $.Payload.resource_region + Type: String + - Name: RemediationAccount + Selector: $.Payload.account_id + Type: String + inputs: + InputPayload: + Finding: '{{Finding}}' + parse_id_pattern: '^arn:(?:aws|aws-cn|aws-us-gov):ec2:[a-z]{2}-[a-z]+-\d{1}:\d{12}:transit-gateway\/(tgw-[a-z0-9\-]+)$' + expected_control_id: + - 'EC2.23' + Runtime: python3.8 + Handler: parse_event + Script: |- + %%SCRIPT=common/parse_input.py%% + - + name: Remediation + action: 'aws:executeAutomation' + inputs: + DocumentName: ASR-DisableTGWAutoAcceptSharedAttachments + TargetLocations: + - Accounts: [ '{{ParseInput.RemediationAccount}}' ] + Regions: [ '{{ParseInput.RemediationRegion}}' ] + ExecutionRoleName: '{{ RemediationRoleName }}' + RuntimeParameters: + AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/{{RemediationRoleName}}' + TransitGatewayId: '{{ParseInput.TransitGatewayId}}' + - + name: UpdateFinding + action: 'aws:executeAwsApi' + inputs: + Service: securityhub + Api: BatchUpdateFindings + FindingIdentifiers: + - Id: '{{ParseInput.FindingId}}' + ProductArn: '{{ParseInput.ProductArn}}' + Note: + Text: 'Disabled Transit Gateway from automatically accepting VPC 
attachment requests.' + UpdatedBy: 'ASR-AFSBP_1.0.0_EC2.23' + Workflow: + Status: RESOLVED + description: Update finding diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.4.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.4.yaml new file mode 100644 index 00000000..42f2c7a9 --- /dev/null +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.4.yaml @@ -0,0 +1,94 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-AFSBP_1.0.0_EC2.4 + + ## What does this document do? + This document terminates an EC2 instance if it has been stopped for longer than the allowed number of days defined by the AllowedDays parameter. + + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. + + ## Documentation Links + * [AWS FSBP v1.0.0 EC2.4](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-4) +schemaVersion: '0.3' +assumeRole: '{{ AutomationAssumeRole }}' +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + Finding: + type: StringMap + description: The input from the Orchestrator Step function for the EC2.4 finding + RemediationRoleName: + type: 'String' + default: 'SO0111-TerminateEC2Instance' + allowedPattern: '^[\w+=,.@-]+$' + +outputs: + - ParseInput.AffectedObject + +mainSteps: + - name: ParseInput + action: 'aws:executeScript' + outputs: + - Name: FindingId + Selector: $.Payload.finding.Id + Type: String + - Name: ProductArn + Selector: $.Payload.finding.ProductArn + Type: String + - Name: AffectedObject + Selector: $.Payload.object + Type: StringMap + - Name: InstanceId + Selector: $.Payload.resource_id + Type: String + - Name: RemediationRegion + Selector: $.Payload.resource_region + Type: String + - Name: RemediationAccount + Selector: $.Payload.account_id + Type: String + inputs: + InputPayload: + Finding: '{{Finding}}' + parse_id_pattern: '^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:instance\/(i-[0-9a-f]*)$' + expected_control_id: + - 'EC2.4' + Runtime: python3.8 + Handler: parse_event + Script: |- + %%SCRIPT=common/parse_input.py%% + + - name: Remediation + action: 'aws:executeAutomation' + inputs: + DocumentName: AWS-TerminateEC2Instance + TargetLocations: + - Accounts: [ '{{ParseInput.RemediationAccount}}' ] + Regions: [ '{{ParseInput.RemediationRegion}}' ] + ExecutionRoleName: '{{ RemediationRoleName }}' + RuntimeParameters: + AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/{{RemediationRoleName}}' + InstanceId: + - '{{ParseInput.InstanceId}}' + + - name: UpdateFinding + action: 'aws:executeAwsApi' + inputs: + Service: securityhub + Api: BatchUpdateFindings + FindingIdentifiers: + - Id: '{{ParseInput.FindingId}}' + ProductArn: '{{ParseInput.ProductArn}}' + Note: + Text: 'Terminated EC2 instance.' 
+ UpdatedBy: 'ASR-AFSBP_1.0.0_EC2.4' + Workflow: + Status: RESOLVED + description: Update finding diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.6.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.6.yaml index d59ebee1..bc39ad59 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.6.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.6.yaml @@ -15,7 +15,7 @@ description: | * Remediation.Output - Remediation results ## Documentation Links - * [AFSBP EC2.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-6) + * [AWS FSBP EC2.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-6) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.7.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.7.yaml index 41fb5caf..6540e956 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.7.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.7.yaml @@ -12,7 +12,7 @@ description: | * Remediation.Output ## Documentation Links - * [AFSBP EC2.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-7) + * [AWS FSBP EC2.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-7) schemaVersion: "0.3" assumeRole: "{{ AutomationAssumeRole }}" parameters: diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.8.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.8.yaml new file mode 100644 index 00000000..09611eaa --- /dev/null +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_EC2.8.yaml @@ -0,0 +1,96 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-AFSBP_1.0.0_EC2.8 + ## What does this document do? 
+ This document enables IMDSv2 on an EC2 instance for an AWS account in the current region by calling another SSM document + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + ## Output Parameters + * Remediation.Output + + ## Documentation Links + * [AWS FSBP EC2.8](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-8) +schemaVersion: "0.3" +assumeRole: "{{ AutomationAssumeRole }}" +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + Finding: + type: StringMap + description: The input from the Orchestrator Step function for the EC2.8 finding + RemediationRoleName: + type: String + default: "SO0111-EnableIMDSV2OnInstance" + allowedPattern: '^[\w+=,.@-]+$' + +outputs: + - ParseInput.AffectedObject + - Remediation.Output +mainSteps: + - + name: ParseInput + action: 'aws:executeScript' + outputs: + - Name: FindingId + Selector: $.Payload.finding_id + Type: String + - Name: ProductArn + Selector: $.Payload.product_arn + Type: String + - Name: AffectedObject + Selector: $.Payload.object + Type: StringMap + - Name: InstanceARN + Selector: $.Payload.resource_id + Type: String + - Name: RemediationRegion + Selector: $.Payload.resource_region + Type: String + - Name: RemediationAccount + Selector: $.Payload.account_id + Type: String + inputs: + InputPayload: + Finding: '{{Finding}}' + parse_id_pattern: '' + expected_control_id: + - 'EC2.8' + Runtime: python3.8 + Handler: parse_event + Script: |- + %%SCRIPT=common/parse_input.py%% + isEnd: false + + - + name: Remediation + action: 'aws:executeAutomation' + inputs: + DocumentName: ASR-EnableIMDSV2OnInstance + TargetLocations: + - Accounts: [ 
'{{ParseInput.RemediationAccount}}' ] + Regions: [ '{{ParseInput.RemediationRegion}}' ] + ExecutionRoleName: '{{ RemediationRoleName }}' + RuntimeParameters: + AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/{{RemediationRoleName}}' + InstanceARN: '{{ParseInput.InstanceARN}}' + - + name: UpdateFinding + action: 'aws:executeAwsApi' + inputs: + Service: securityhub + Api: BatchUpdateFindings + FindingIdentifiers: + - Id: '{{ParseInput.FindingId}}' + ProductArn: '{{ParseInput.ProductArn}}' + Note: + Text: 'Enabled IMDSv2 on Instance' + UpdatedBy: 'ASR-AFSBP_1.0.0_EC2.8' + Workflow: + Status: RESOLVED + description: Update finding + isEnd: true \ No newline at end of file diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_ECR.1.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_ECR.1.yaml new file mode 100644 index 00000000..79cb92fb --- /dev/null +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_ECR.1.yaml @@ -0,0 +1,98 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-AFSBP_1.0.0_ECR.1 + + ## What does this document do? + This document enables image scanning configuration on a private ECR repository. + + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. + + ## Output Parameters + * Remediation.Output + + ## Documentation Links + * [AWS FSBP v1.0.0 ECR.1](https://docs.aws.amazon.com/securityhub/latest/userguide/ecr-controls.html#ecr-1) +schemaVersion: '0.3' +assumeRole: '{{ AutomationAssumeRole }}' +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + Finding: + type: StringMap + description: The input from the Orchestrator Step function for the ECR.1 finding + RemediationRoleName: + type: 'String' + default: 'SO0111-EnablePrivateRepositoryScanning' + allowedPattern: '^[\w+=,.@-]+$' + +outputs: + - ParseInput.AffectedObject + - Remediation.Output + +mainSteps: + - + name: ParseInput + action: 'aws:executeScript' + outputs: + - Name: FindingId + Selector: $.Payload.finding.Id + Type: String + - Name: ProductArn + Selector: $.Payload.finding.ProductArn + Type: String + - Name: AffectedObject + Selector: $.Payload.object + Type: StringMap + - Name: RepositoryName + Selector: $.Payload.resource_id + Type: String + - Name: RemediationRegion + Selector: $.Payload.resource_region + Type: String + - Name: RemediationAccount + Selector: $.Payload.account_id + Type: String + inputs: + InputPayload: + Finding: '{{Finding}}' + parse_id_pattern: '^arn:(?:aws|aws-cn|aws-us-gov):ecr:[a-z]{2}-[a-z]+-\d{1}:\d{12}:repository\/([a-z0-9._\/\-]+)$' + expected_control_id: + - 'ECR.1' + Runtime: python3.8 + Handler: parse_event + Script: |- + %%SCRIPT=common/parse_input.py%% + - + name: Remediation + action: 'aws:executeAutomation' + inputs: + DocumentName: ASR-EnablePrivateRepositoryScanning + TargetLocations: + - Accounts: [ '{{ParseInput.RemediationAccount}}' ] + Regions: [ '{{ParseInput.RemediationRegion}}' ] + ExecutionRoleName: '{{ RemediationRoleName }}' + RuntimeParameters: + AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/{{RemediationRoleName}}' + RepositoryName: '{{ParseInput.RepositoryName}}' + - + name: UpdateFinding + action: 'aws:executeAwsApi' + inputs: + Service: securityhub + Api: BatchUpdateFindings + FindingIdentifiers: + - Id: '{{ParseInput.FindingId}}' + ProductArn: '{{ParseInput.ProductArn}}' + Note: + Text: 'Enabled image scanning on private repository.' 
+ UpdatedBy: 'ASR-AFSBP_1.0.0_ECR.1' + Workflow: + Status: RESOLVED + description: Update finding diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_GuardDuty.1.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_GuardDuty.1.yaml new file mode 100644 index 00000000..d73979c3 --- /dev/null +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_GuardDuty.1.yaml @@ -0,0 +1,93 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-AFSBP_1.0.0_GuardDuty.1 + + ## What does this document do? + This document enables Amazon GuardDuty. + + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. + + ## Output Parameters + * Remediation.Output + + ## Documentation Links + * [AWS FSBP v1.0.0 GuardDuty.1](https://docs.aws.amazon.com/securityhub/latest/userguide/guardduty-controls.html#guardduty-1) +schemaVersion: '0.3' +assumeRole: '{{ AutomationAssumeRole }}' +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + Finding: + type: StringMap + description: The input from the Orchestrator Step function for the GuardDuty.1 finding + RemediationRoleName: + type: 'String' + default: 'SO0111-EnableGuardDuty' + allowedPattern: '^[\w+=,.@-]+$' + +outputs: + - ParseInput.AffectedObject + - Remediation.Output + +mainSteps: + - + name: ParseInput + action: 'aws:executeScript' + outputs: + - Name: FindingId + Selector: $.Payload.finding.Id + Type: String + - Name: ProductArn + Selector: $.Payload.finding.ProductArn + Type: String + - Name: AffectedObject + Selector: $.Payload.object + Type: StringMap + - Name: SubnetARN + Selector: $.Payload.resource_id + Type: String + - Name: RemediationRegion + Selector: $.Payload.resource_region + Type: String + - Name: RemediationAccount + Selector: $.Payload.account_id + Type: String + inputs: + InputPayload: + Finding: '{{Finding}}' + parse_id_pattern: '' + expected_control_id: + - 'GuardDuty.1' + Runtime: python3.8 + Handler: parse_event + Script: |- + %%SCRIPT=common/parse_input.py%% + - + name: Remediation + action: 'aws:executeAutomation' + inputs: + DocumentName: ASR-EnableGuardDuty + RuntimeParameters: + AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/{{RemediationRoleName}}' + - + name: UpdateFinding + action: 'aws:executeAwsApi' + inputs: + Service: securityhub + Api: BatchUpdateFindings + FindingIdentifiers: + - Id: '{{ParseInput.FindingId}}' + ProductArn: '{{ParseInput.ProductArn}}' + Note: + Text: 'Enabled Amazon GuardDuty.' 
+ UpdatedBy: 'ASR-AFSBP_1.0.0_GuardDuty.1' + Workflow: + Status: RESOLVED + description: Update finding diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_IAM.3.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_IAM.3.yaml index 5b2ccc84..309b9cd6 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_IAM.3.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_IAM.3.yaml @@ -15,7 +15,7 @@ description: | * Remediation.Output ## Documentation Links - * [AFSBP v1.0.0 IAM.3](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-3) + * [AWS FSBP v1.0.0 IAM.3](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-3) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_IAM.7.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_IAM.7.yaml index 823bac0a..0196832b 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_IAM.7.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_IAM.7.yaml @@ -8,7 +8,7 @@ description: | This document establishes a default password policy. 
## Security Standards and Controls - * AFSBP IAM.7 + * AWS FSBP IAM.7 ## Input Parameters * Finding: (Required) Security Hub finding details JSON @@ -17,7 +17,7 @@ description: | * Remediation.Output ## Documentation Links - * [AFSBP IAM.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-7) + * [AWS FSBP IAM.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-7) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' @@ -46,6 +46,9 @@ mainSteps: - Name: AffectedObject Selector: $.Payload.object Type: StringMap + - Name: SecHubInputParams + Selector: $.Payload.input_params + Type: StringMap inputs: InputPayload: Finding: '{{Finding}}' @@ -56,21 +59,70 @@ mainSteps: Script: |- %%SCRIPT=common/parse_input.py%% isEnd: false + - name: GetInputParams + action: 'aws:executeScript' + outputs: + - Name: AllowUsersToChangePassword + Selector: $.Payload.AllowUsersToChangePassword + Type: Boolean + - Name: HardExpiry + Selector: $.Payload.HardExpiry + Type: Boolean + - Name: MaxPasswordAge + Selector: $.Payload.MaxPasswordAge + Type: Integer + - Name: MinimumPasswordLength + Selector: $.Payload.MinimumPasswordLength + Type: Integer + - Name: RequireSymbols + Selector: $.Payload.RequireSymbols + Type: Boolean + - Name: RequireNumbers + Selector: $.Payload.RequireNumbers + Type: Boolean + - Name: RequireUppercaseCharacters + Selector: $.Payload.RequireUppercaseCharacters + Type: Boolean + - Name: RequireLowercaseCharacters + Selector: $.Payload.RequireLowercaseCharacters + Type: Boolean + - Name: PasswordReusePrevention + Selector: $.Payload.PasswordReusePrevention + Type: Integer + inputs: + InputPayload: + SecHubInputParams: '{{ParseInput.SecHubInputParams}}' + DefaultParams: { + "AllowUsersToChangePassword": True, + "HardExpiry": True, + "MaxPasswordAge": 90, + "MinimumPasswordLength": 14, + "RequireSymbols": True, + "RequireNumbers": True, + 
"RequireUppercaseCharacters": True, + "RequireLowercaseCharacters": True, + "PasswordReusePrevention": 24 + } + expected_control_id: [ 'IAM.7' ] + Runtime: python3.8 + Handler: get_input_params + Script: |- + %%SCRIPT=common/get_input_params.py%% - name: Remediation action: 'aws:executeAutomation' isEnd: false inputs: DocumentName: ASR-SetIAMPasswordPolicy RuntimeParameters: - AllowUsersToChangePassword: True - HardExpiry: True - MaxPasswordAge: 90 - MinimumPasswordLength: 14 - RequireSymbols: True - RequireNumbers: True - RequireUppercaseCharacters: True - RequireLowercaseCharacters: True - PasswordReusePrevention: 24 + AllowUsersToChangePassword: '{{GetInputParams.AllowUsersToChangePassword}}' + HardExpiry: '{{GetInputParams.HardExpiry}}' + MaxPasswordAge: '{{GetInputParams.MaxPasswordAge}}' + MinimumPasswordLength: '{{GetInputParams.MinimumPasswordLength}}' + RequireSymbols: '{{GetInputParams.RequireSymbols}}' + RequireNumbers: '{{GetInputParams.RequireNumbers}}' + RequireUppercaseCharacters: '{{GetInputParams.RequireUppercaseCharacters}}' + RequireLowercaseCharacters: '{{GetInputParams.RequireLowercaseCharacters}}' + PasswordReusePrevention: '{{GetInputParams.PasswordReusePrevention}}' AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/SO0111-SetIAMPasswordPolicy' - name: UpdateFinding diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_IAM.8.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_IAM.8.yaml index 897c61b4..e7b5e938 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_IAM.8.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_IAM.8.yaml @@ -17,7 +17,7 @@ description: | SEE AWSConfigRemediation-RevokeUnusedIAMUserCredentials ## Documentation Links - * [AFSBP IAM.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-8) + * [AWS FSBP IAM.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-8) schemaVersion: '0.3' diff --git 
a/source/playbooks/AFSBP/ssmdocs/AFSBP_Lambda.1.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_Lambda.1.yaml index 2fa9cb29..9a288e82 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_Lambda.1.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_Lambda.1.yaml @@ -14,7 +14,7 @@ description: | * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. ## Documentation Links - * [AFSBP Lambda.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-lambda-1) + * [AWS FSBP Lambda.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-lambda-1) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.1.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.1.yaml index f2f8f76f..0827e938 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.1.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.1.yaml @@ -11,7 +11,7 @@ description: | * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
## Documentation Links - * [AFSBP RDS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-1) + * [AWS FSBP RDS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-1) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' outputs: @@ -61,7 +61,7 @@ mainSteps: inputs: InputPayload: Finding: '{{Finding}}' - parse_id_pattern: '^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:(cluster-snapshot|snapshot):([a-zA-Z](?:[0-9a-zA-Z]+-)*[0-9a-zA-Z]+)$' + parse_id_pattern: '^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:(cluster-snapshot|snapshot):([a-zA-Z][0-9a-zA-Z]*(?:-[0-9a-zA-Z]+)*)$' resource_index: 2 expected_control_id: - 'RDS.1' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.13.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.13.yaml index 6d34bea6..6c17d070 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.13.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.13.yaml @@ -15,7 +15,7 @@ description: | * Remediation.Output - The standard HTTP response from the ModifyDBInstance API. ## Documentation Links - * [AFSBP RDS.13](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-13) + * [AWS FSBP RDS.13](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-13) schemaVersion: "0.3" assumeRole: "{{ AutomationAssumeRole }}" diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.16.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.16.yaml index 06547fcd..ae933447 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.16.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.16.yaml @@ -15,7 +15,7 @@ description: | * Remediation.Output - The standard HTTP response from the ModifyDBCluster API. 
## Documentation Links - * [AFSBP RDS.16](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-16) + * [AWS FSBP RDS.16](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-16) schemaVersion: "0.3" assumeRole: "{{ AutomationAssumeRole }}" diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.2.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.2.yaml index 4ddb0638..c774f0ab 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.2.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.2.yaml @@ -11,7 +11,7 @@ description: | * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. ## Documentation Links - * [AFSBP RDS.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-2) + * [AWS FSBP RDS.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-2) ## Troubleshooting * ModifyDBInstance isn't supported for a DB instance in a Multi-AZ DB Cluster. diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.4.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.4.yaml index 474b0730..b81a66be 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.4.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.4.yaml @@ -15,7 +15,7 @@ description: | * KMSKeyId: (Optional) ID, ARN or Alias for the AWS KMS Customer-Managed Key (CMK) to use to encrypt the snapshot. 
## Documentation Links - * [AFSBP RDS.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-4) + * [AWS FSBP RDS.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-4) assumeRole: '{{AutomationAssumeRole}}' outputs: - 'Remediation.Output' @@ -68,7 +68,7 @@ mainSteps: inputs: InputPayload: Finding: '{{Finding}}' - parse_id_pattern: '^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:((?:cluster-)?snapshot|dbclustersnapshot):((?:rds:)?((?!.*--.*)(?!.*-$)[a-zA-Z][a-zA-Z0-9-]{0,254}))$' + parse_id_pattern: '^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:((?:cluster-)?snapshot|dbclustersnapshot):((?:rds:|awsbackup:)?((?!.*--.*)(?!.*-$)[a-zA-Z][a-zA-Z0-9-]{0,254}))$' resource_index: 2 expected_control_id: - 'RDS.4' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.5.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.5.yaml index 340b73fd..43b9e7d3 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.5.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.5.yaml @@ -14,7 +14,7 @@ description: | * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. 
## Documentation Links - * [AFSBP RDS.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-5) + * [AWS FSBP RDS.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-5) assumeRole: '{{AutomationAssumeRole}}' outputs: - 'Remediation.Output' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.6.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.6.yaml index af11df6c..11934449 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.6.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.6.yaml @@ -15,7 +15,7 @@ description: | * VerifyRemediation.Output - The standard HTTP response from the ModifyDBInstance API. ## Documentation Links - * [AFSBP RDS.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-6) + * [AWS FSBP RDS.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-6) schemaVersion: "0.3" assumeRole: "{{ AutomationAssumeRole }}" parameters: diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.7.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.7.yaml index 0d9deaf1..0d89822f 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.7.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.7.yaml @@ -15,7 +15,7 @@ description: | * Remediation.Output - The standard HTTP response from the ModifyDBCluster API. 
## Documentation Links - * [AFSBP RDS.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-7) + * [AWS FSBP RDS.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-7) schemaVersion: "0.3" assumeRole: "{{ AutomationAssumeRole }}" diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.8.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.8.yaml index bc12e4ed..711c3598 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.8.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_RDS.8.yaml @@ -14,7 +14,7 @@ description: | * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. ## Documentation Links - * [AFSBP RDS.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-8) + * [AWS FSBP RDS.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-8) assumeRole: '{{AutomationAssumeRole}}' outputs: - 'Remediation.Output' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_Redshift.1.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_Redshift.1.yaml index 567a2ac1..f14404de 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_Redshift.1.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_Redshift.1.yaml @@ -14,7 +14,7 @@ description: | * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. 
 ## Documentation Links - * [AFSBP Redshift.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-1) + * [AWS FSBP Redshift.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-1) assumeRole: '{{AutomationAssumeRole}}' outputs: - 'Remediation.Output' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_Redshift.3.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_Redshift.3.yaml index 8389a6b3..46376eed 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_Redshift.3.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_Redshift.3.yaml @@ -14,7 +14,7 @@ description: | * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. ## Documentation Links - * [AFSBP Redshift.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-3) + * [AWS FSBP Redshift.3](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-3) assumeRole: '{{AutomationAssumeRole}}' outputs: - 'Remediation.Output' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_Redshift.4.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_Redshift.4.yaml index 5c36cd83..8a86b689 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_Redshift.4.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_Redshift.4.yaml @@ -14,7 +14,7 @@ description: | * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. 
## Documentation Links - * [AFSBP Redshift.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-1) + * [AWS FSBP Redshift.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-4) assumeRole: '{{AutomationAssumeRole}}' outputs: - 'Remediation.Output' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_Redshift.6.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_Redshift.6.yaml index 48814a7f..e9798f7f 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_Redshift.6.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_Redshift.6.yaml @@ -14,7 +14,7 @@ description: | * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. ## Documentation Links - * [AFSBP Redshift.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-6) + * [AWS FSBP Redshift.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-6) assumeRole: '{{AutomationAssumeRole}}' outputs: - 'Remediation.Output' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.1.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.1.yaml index aa909832..da23141c 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.1.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.1.yaml @@ -15,7 +15,7 @@ description: | * Remediation.Output ## Documentation Links - * [AFSBP v1.0.0 S3.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-1) + * [AWS FSBP v1.0.0 S3.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-1) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.11.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.11.yaml new file mode 100644 index 00000000..e1f1161a --- /dev/null +++ 
b/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.11.yaml @@ -0,0 +1,125 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-AFSBP_1.0.0_S3.11 + + ## What does this document do? + Configures event notification for an S3 bucket. + + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + + ## Output Parameters + * Remediation.Output - Remediation results + + ## Documentation Links + * [AWS FSBP v1.0.0 S3.11](https://docs.aws.amazon.com/securityhub/latest/userguide/s3-controls.html#s3-11) + +schemaVersion: '0.3' +assumeRole: '{{ AutomationAssumeRole }}' +outputs: + - ParseInput.AffectedObject + - Remediation.Output +parameters: + Finding: + type: StringMap + description: The input from the Orchestrator Step function for the S3.11 finding + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + RemediationRoleName: + type: String + default: "SO0111-EnableBucketEventNotifications" + allowedPattern: '^[\w+=,.@-]+$' + +mainSteps: + - + name: ParseInput + action: 'aws:executeScript' + outputs: + - Name: BucketName + Selector: $.Payload.resource_id + Type: String + - Name: FindingId + Selector: $.Payload.finding_id + Type: String + - Name: ProductArn + Selector: $.Payload.product_arn + Type: String + - Name: AffectedObject + Selector: $.Payload.object + Type: StringMap + - Name: AccountId + Selector: $.Payload.account_id + Type: String + - Name: SecHubInputParams + Selector: $.Payload.input_params + Type: StringMap + inputs: + InputPayload: + Finding: '{{Finding}}' + parse_id_pattern: '^arn:(?:aws|aws-cn|aws-us-gov):s3:::([A-Za-z0-9.-]{3,63})$' + expected_control_id: [ 'S3.11' ] + Runtime: python3.8 + Handler: parse_event + Script: |- + %%SCRIPT=common/parse_input.py%% + - + name: GetInputParams + action: 'aws:executeScript' + outputs: + - Name: eventTypes + Selector: $.Payload.eventTypes + Type: StringList + inputs: + InputPayload: + SecHubInputParams: '{{ParseInput.SecHubInputParams}}' + DefaultParams: { + "eventTypes": [ + "s3:ReducedRedundancyLostObject", + "s3:ObjectCreated:*", + "s3:ObjectRemoved:*", + "s3:ObjectRestore:*", + "s3:Replication:*", + "s3:LifecycleExpiration:*", + "s3:LifecycleTransition", + "s3:IntelligentTiering", + "s3:ObjectTagging:*", + "s3:ObjectAcl:Put", + ], + } + expected_control_id: [ 'S3.11' ] + Runtime: python3.8 + Handler: get_input_params + Script: |- + %%SCRIPT=common/get_input_params.py%% + + - name: Remediation + action: 'aws:executeAutomation' + isEnd: false + inputs: + DocumentName: ASR-EnableBucketEventNotifications + RuntimeParameters: + BucketName: '{{ParseInput.BucketName}}' + AccountId: '{{ParseInput.AccountId}}' + EventTypes: '{{GetInputParams.eventTypes}}' + AutomationAssumeRole: 
'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/{{RemediationRoleName}}' + + - name: UpdateFinding + action: 'aws:executeAwsApi' + inputs: + Service: securityhub + Api: BatchUpdateFindings + FindingIdentifiers: + - Id: '{{ParseInput.FindingId}}' + ProductArn: '{{ParseInput.ProductArn}}' + Note: + Text: 'Configured event notification to {{ParseInput.BucketName}}' + UpdatedBy: 'ASR-AFSBP_1.0.0_S3.11' + Workflow: + Status: RESOLVED + description: Update finding + isEnd: true diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.13.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.13.yaml new file mode 100644 index 00000000..82d7a424 --- /dev/null +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.13.yaml @@ -0,0 +1,120 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-AFSBP_1.0.0_S3.13 + + ## What does this document do? + This document sets an example lifecycle policy that transfers objects greater than 10 GB to S3 Intelligent Tiering after 90 days. + It is recommended to set lifecycle policies appropriate for the objects stored in your S3 bucket. + + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + + ## Output Parameters + * Remediation.Output - Remediation results + + ## Documentation Links + * [AWS FSBP v1.0.0 S3.13](https://docs.aws.amazon.com/securityhub/latest/userguide/s3-controls.html#s3-13) + +schemaVersion: '0.3' +assumeRole: '{{ AutomationAssumeRole }}' +outputs: + - ParseInput.AffectedObject + - Remediation.Output +parameters: + Finding: + type: StringMap + description: The input from the Orchestrator Step function for the S3.13 finding + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + RemediationRoleName: + type: String + default: "SO0111-SetS3LifecyclePolicy" + allowedPattern: '^[\w+=,.@-]+$' + +mainSteps: + - + name: ParseInput + action: 'aws:executeScript' + outputs: + - Name: BucketName + Selector: $.Payload.resource_id + Type: String + - Name: FindingId + Selector: $.Payload.finding_id + Type: String + - Name: ProductArn + Selector: $.Payload.product_arn + Type: String + - Name: AffectedObject + Selector: $.Payload.object + Type: StringMap + - Name: SecHubInputParams + Selector: $.Payload.input_params + Type: StringMap + inputs: + InputPayload: + Finding: '{{Finding}}' + parse_id_pattern: '^arn:(?:aws|aws-cn|aws-us-gov):s3:::([A-Za-z0-9.-]{3,63})$' + expected_control_id: [ 'S3.13' ] + Runtime: python3.8 + Handler: parse_event + Script: |- + %%SCRIPT=common/parse_input.py%% + - name: GetInputParams + action: 'aws:executeScript' + outputs: + - Name: targetTransitionDays + Selector: $.Payload.targetTransitionDays + Type: Integer + - Name: targetExpirationDays + Selector: $.Payload.targetExpirationDays + Type: Integer + - Name: targetTransitionStorageClass + Selector: $.Payload.targetTransitionStorageClass + Type: String + inputs: + InputPayload: + SecHubInputParams: '{{ParseInput.SecHubInputParams}}' + DefaultParams: { + "targetTransitionDays": 30, + "targetExpirationDays": 0, + "targetTransitionStorageClass": "INTELLIGENT_TIERING" + } + expected_control_id: [ 'S3.13' ] + Runtime: python3.8 + Handler: get_input_params + Script: |- + %%SCRIPT=common/get_input_params.py%% + + - name: Remediation + action: 'aws:executeAutomation' + isEnd: false + inputs: + DocumentName: ASR-SetS3LifecyclePolicy + RuntimeParameters: + BucketName: '{{ParseInput.BucketName}}' + AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/{{RemediationRoleName}}' + TargetTransitionDays: '{{GetInputParams.targetTransitionDays}}' + TargetExpirationDays: 
'{{GetInputParams.targetExpirationDays}}' + TargetTransitionStorageClass: '{{GetInputParams.targetTransitionStorageClass}}' + + - name: UpdateFinding + action: 'aws:executeAwsApi' + inputs: + Service: securityhub + Api: BatchUpdateFindings + FindingIdentifiers: + - Id: '{{ParseInput.FindingId}}' + ProductArn: '{{ParseInput.ProductArn}}' + Note: + Text: 'Set example lifecycle policy for {{ParseInput.BucketName}}' + UpdatedBy: 'ASR-AFSBP_1.0.0_S3.13' + Workflow: + Status: RESOLVED + description: Update finding + isEnd: true diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.2.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.2.yaml index a9d3ad41..38a5f044 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.2.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.2.yaml @@ -15,9 +15,9 @@ description: | * Remediation.Output ## Documentation Links - * [AFSBP v1.0.0 S3.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-2) - * [AFSBP v1.0.0 S3.3](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-3) - * [AFSBP v1.0.0 S3.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-8) + * [AWS FSBP v1.0.0 S3.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-2) + * [AWS FSBP v1.0.0 S3.3](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-3) + * [AWS FSBP v1.0.0 S3.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-8) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.4.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.4.yaml index bb1711ba..d7bc8b49 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.4.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.4.yaml @@ -15,7 +15,7 @@ description: 
| * Remediation.Output ## Documentation Links - * [AFSBP v1.0.0 S3.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-4) + * [AWS FSBP v1.0.0 S3.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-4) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.5.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.5.yaml index 18ce7352..4d5d4a21 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.5.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.5.yaml @@ -15,7 +15,7 @@ description: | * Remediation.Output ## Documentation Links - * [AFSBP v1.0.0 S3.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-5) + * [AWS FSBP v1.0.0 S3.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-5) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.6.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.6.yaml index 3e8618da..197f9087 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.6.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.6.yaml @@ -15,7 +15,7 @@ description: | * Remediation.Output ## Documentation Links - * [AFSBP v1.0.0 S3.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-6) + * [AWS FSBP v1.0.0 S3.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-6) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.9.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.9.yaml new file mode 100644 index 00000000..87f3fff2 --- /dev/null +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_S3.9.yaml @@ -0,0 +1,103 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-AFSBP_1.0.0_S3.9 + + ## What does this document do? + Configures access logging for an S3 bucket. + + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + + ## Output Parameters + * Remediation.Output - Remediation results + + ## Documentation Links + * [AWS FSBP v1.0.0 S3.9](https://docs.aws.amazon.com/securityhub/latest/userguide/s3-controls.html#s3-9) + +schemaVersion: '0.3' +assumeRole: '{{ AutomationAssumeRole }}' +outputs: + - ParseInput.AffectedObject + - Remediation.Output +parameters: + Finding: + type: StringMap + description: The input from the Orchestrator Step function for the S3.9 finding + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + RemediationRoleName: + type: String + default: "SO0111-S3EnableBucketAccessLogging" + allowedPattern: '^[\w+=,.@-]+$' + +mainSteps: + - + name: ParseInput + action: 'aws:executeScript' + outputs: + - Name: CloudTrailBucket + Selector: $.Payload.resource_id + Type: String + - Name: AffectedObject + Selector: $.Payload.object + Type: StringMap + - Name: FindingId + Selector: $.Payload.finding.Id + Type: String + - Name: ProductArn + Selector: $.Payload.finding.ProductArn + Type: String + inputs: + InputPayload: + Finding: '{{Finding}}' + parse_id_pattern: '^arn:(?:aws|aws-cn|aws-us-gov):s3:::([A-Za-z0-9.-]{3,63})$' + expected_control_id: [ 'S3.9' ] + Runtime: python3.8 + Handler: parse_event + Script: |- + %%SCRIPT=common/parse_input.py%% + + - name: CreateAccessLoggingBucket + action: 'aws:executeAutomation' + isEnd: false + inputs: + DocumentName: ASR-CreateAccessLoggingBucket + RuntimeParameters: + BucketName: 
'so0111-cloudtrailaccesslogs-{{global:ACCOUNT_ID}}-{{global:REGION}}' + AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/SO0111-CreateAccessLoggingBucket' + + - name: Remediation + action: 'aws:executeAutomation' + isEnd: false + inputs: + DocumentName: AWS-ConfigureS3BucketLogging + RuntimeParameters: + BucketName: '{{ParseInput.CloudTrailBucket}}' + GrantedPermission: ['READ'] + GranteeType: ['Group'] + GranteeUri: ['http://acs.amazonaws.com/groups/s3/LogDelivery'] + TargetPrefix: ['{{ParseInput.CloudTrailBucket}}/'] + TargetBucket: ['so0111-cloudtrailaccesslogs-{{global:ACCOUNT_ID}}-{{global:REGION}}'] + AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/SO0111-ConfigureS3BucketLogging' + + - name: UpdateFinding + action: 'aws:executeAwsApi' + inputs: + Service: securityhub + Api: BatchUpdateFindings + FindingIdentifiers: + - Id: '{{ParseInput.FindingId}}' + ProductArn: '{{ParseInput.ProductArn}}' + Note: + Text: 'Created S3 bucket so0111-cloudtrailaccesslogs-{{global:ACCOUNT_ID}}-{{global:REGION}} + for logging access to {{ParseInput.CloudTrailBucket}}' + UpdatedBy: 'ASR-AFSBP_1.0.0_S3.9' + Workflow: + Status: RESOLVED + description: Update finding + isEnd: true diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_SNS.1.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_SNS.1.yaml index 7e0130fa..2a047fe7 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_SNS.1.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_SNS.1.yaml @@ -15,7 +15,7 @@ description: | * Remediation.Output ## Documentation Links - * [AFSBP v1.0.0 SNS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-sns-1) + * [AWS FSBP v1.0.0 SNS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-sns-1) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' @@ -51,7 +51,7 @@ mainSteps: - Name: AffectedObject Selector: $.Payload.object 
Type: StringMap - - Name: SNSTopicArn + - Name: TopicArn Selector: $.Payload.resource_id Type: String - Name: RemediationRegion @@ -84,7 +84,7 @@ mainSteps: RuntimeParameters: KmsKeyArn: '{{KmsKeyArn}}' AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/SO0111-EnableEncryptionForSNSTopic' - TopicArn: '{{ParseInput.SNSTopicArn}}' + TopicArn: '{{ParseInput.TopicArn}}' - name: UpdateFinding action: 'aws:executeAwsApi' diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_SNS.2.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_SNS.2.yaml index 0204af89..42d846bc 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_SNS.2.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_SNS.2.yaml @@ -16,7 +16,7 @@ description: | * Remediation.Output ## Documentation Links - * [AFSBP v1.0.0 SNS.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-sns-2) + * [AWS FSBP v1.0.0 SNS.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-sns-2) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' parameters: @@ -33,9 +33,8 @@ parameters: allowedPattern: '^[\w+=,.@-]+$' LoggingRole: type: 'String' - default: >- - {{ssm:/Solutions/SO0111/DeliveryStatusLoggingRole}} - allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + default: 'SO0111-SNS2DeliveryStatusLogging-remediationRole' + allowedPattern: '^[\w+=,.@-]+$' outputs: - ParseInput.AffectedObject - Remediation.Output @@ -84,7 +83,7 @@ mainSteps: ExecutionRoleName: '{{RemediationRoleName}}' RuntimeParameters: AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/{{RemediationRoleName}}' - LoggingRole: '{{LoggingRole}}' + LoggingRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/{{LoggingRole}}' SNSTopicArn: '{{ParseInput.SNSTopicArn}}' - name: UpdateFinding action: 'aws:executeAwsApi' diff --git 
a/source/playbooks/AFSBP/ssmdocs/AFSBP_SQS.1.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_SQS.1.yaml index 9039264f..d1fd985c 100644 --- a/source/playbooks/AFSBP/ssmdocs/AFSBP_SQS.1.yaml +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_SQS.1.yaml @@ -15,7 +15,7 @@ description: | * Remediation.Output ## Documentation Links - * [AFSBP v1.0.0 SQS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-sqs-1) + * [AWS FSBP v1.0.0 SQS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-sqs-1) schemaVersion: '0.3' assumeRole: '{{ AutomationAssumeRole }}' parameters: diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_SSM.4.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_SSM.4.yaml new file mode 100644 index 00000000..7225b49e --- /dev/null +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_SSM.4.yaml @@ -0,0 +1,96 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-AFSBP_1.0.0_SSM.4 + + ## What does this document do? + This document modifies SSM document permissions to prevent cross-account public access. + + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. + + ## Output Parameters + * Remediation.Output + + ## Documentation Links + * [AWS FSBP v1.0.0 SSM.4](https://docs.aws.amazon.com/securityhub/latest/userguide/ssm-controls.html#ssm-4) +schemaVersion: '0.3' +assumeRole: '{{ AutomationAssumeRole }}' +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + Finding: + type: StringMap + description: The input from the Orchestrator Step function for the SSM.4 finding + RemediationRoleName: + type: 'String' + default: 'SO0111-BlockSSMDocumentPublicAccess' + allowedPattern: '^[\w+=,.@-]+$' +outputs: + - ParseInput.AffectedObject + - Remediation.Output + +mainSteps: + - + name: ParseInput + action: 'aws:executeScript' + outputs: + - Name: FindingId + Selector: $.Payload.finding.Id + Type: String + - Name: ProductArn + Selector: $.Payload.finding.ProductArn + Type: String + - Name: AffectedObject + Selector: $.Payload.object + Type: StringMap + - Name: DocumentArn + Selector: $.Payload.resource_id + Type: String + - Name: RemediationRegion + Selector: $.Payload.resource_region + Type: String + - Name: RemediationAccount + Selector: $.Payload.account_id + Type: String + inputs: + InputPayload: + Finding: '{{Finding}}' + parse_id_pattern: '' + expected_control_id: + - 'SSM.4' + Runtime: python3.8 + Handler: parse_event + Script: |- + %%SCRIPT=common/parse_input.py%% + + - name: Remediation + action: 'aws:executeAutomation' + inputs: + DocumentName: ASR-BlockSSMDocumentPublicAccess + TargetLocations: + - Accounts: [ '{{ParseInput.RemediationAccount}}' ] + Regions: [ '{{ParseInput.RemediationRegion}}' ] + ExecutionRoleName: '{{RemediationRoleName}}' + RuntimeParameters: + AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/{{RemediationRoleName}}' + DocumentArn: '{{ ParseInput.DocumentArn }}' + - name: UpdateFinding + action: 'aws:executeAwsApi' + inputs: + Service: securityhub + Api: BatchUpdateFindings + FindingIdentifiers: + - Id: '{{ParseInput.FindingId}}' + ProductArn: '{{ParseInput.ProductArn}}' + Note: + Text: 'SSM document changed from public to private' + UpdatedBy: 'ASR-AFSBP_1.0.0_SSM.4' + Workflow: + Status: RESOLVED + description: Update finding diff --git 
a/source/playbooks/AFSBP/ssmdocs/AFSBP_SecretsManager.1.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_SecretsManager.1.yaml new file mode 100644 index 00000000..1cff3ff4 --- /dev/null +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_SecretsManager.1.yaml @@ -0,0 +1,121 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-AFSBP_1.0.0_SecretsManager.1 + + ## What does this document do? + This document enables automatic rotation on a Secrets Manager secret if a Lambda function is already associated with it. + + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. + + ## Output Parameters + * Remediation.Output + + ## Documentation Links + * [AWS FSBP v1.0.0 SecretsManager.1](https://docs.aws.amazon.com/securityhub/latest/userguide/secretsmanager-controls.html#secretsmanager-1) +schemaVersion: '0.3' +assumeRole: '{{ AutomationAssumeRole }}' +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + Finding: + type: StringMap + description: The input from the Orchestrator Step function for the SecretsManager.1 finding + RemediationRoleName: + type: 'String' + default: 'SO0111-EnableAutoSecretRotation' + allowedPattern: '^[\w+=,.@-]+$' + +outputs: + - ParseInput.AffectedObject + - Remediation.Output + +mainSteps: + - + name: ParseInput + action: 'aws:executeScript' + outputs: + - Name: FindingId + Selector: $.Payload.finding.Id + Type: String + - Name: ProductArn + Selector: $.Payload.finding.ProductArn + Type: String + - Name: AffectedObject + Selector: $.Payload.object + Type: StringMap + - Name: SecretARN + Selector: $.Payload.resource_id + Type: String + - Name: RemediationRegion + Selector: $.Payload.resource_region + Type: String + - Name: RemediationAccount + Selector: $.Payload.account_id + Type: String + - Name: SecHubInputParams + Selector: $.Payload.input_params + Type: StringMap + inputs: + InputPayload: + Finding: '{{Finding}}' + parse_id_pattern: '' + expected_control_id: + - 'SecretsManager.1' + Runtime: python3.8 + Handler: parse_event + Script: |- + %%SCRIPT=common/parse_input.py%% + - + name: GetInputParams + action: 'aws:executeScript' + outputs: + - Name: maximumAllowedRotationFrequency + Selector: $.Payload.maximumAllowedRotationFrequency + Type: Integer + inputs: + InputPayload: + SecHubInputParams: '{{ParseInput.SecHubInputParams}}' + DefaultParams: { + "maximumAllowedRotationFrequency": 90 + } + expected_control_id: [ 'SecretsManager.1' ] + Runtime: python3.8 + Handler: get_input_params + Script: |- + %%SCRIPT=common/get_input_params.py%% + + - + name: Remediation + action: 'aws:executeAutomation' + inputs: + DocumentName: ASR-EnableAutoSecretRotation + TargetLocations: + - Accounts: [ '{{ParseInput.RemediationAccount}}' ] + Regions: [ '{{ParseInput.RemediationRegion}}' ] + ExecutionRoleName: '{{ RemediationRoleName }}' + RuntimeParameters: + 
AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/{{RemediationRoleName}}' + SecretARN: '{{ParseInput.SecretARN}}' + MaximumAllowedRotationFrequency: '{{GetInputParams.maximumAllowedRotationFrequency}}' + - + name: UpdateFinding + action: 'aws:executeAwsApi' + inputs: + Service: securityhub + Api: BatchUpdateFindings + FindingIdentifiers: + - Id: '{{ParseInput.FindingId}}' + ProductArn: '{{ParseInput.ProductArn}}' + Note: + Text: 'Enabled automatic rotation on secret and set schedule to 90 days.' + UpdatedBy: 'ASR-AFSBP_1.0.0_SecretsManager.1' + Workflow: + Status: RESOLVED + description: Update finding diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_SecretsManager.3.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_SecretsManager.3.yaml new file mode 100644 index 00000000..fa73dac1 --- /dev/null +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_SecretsManager.3.yaml @@ -0,0 +1,121 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-AFSBP_1.0.0_SecretsManager.3 + + ## What does this document do? + This document deletes a secret that has been unused for the number of days specified in the unusedForDays parameter (Default: 90 days). + There is a 30 day period to recover the secret after it is deleted. + [DeleteSecret](https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_DeleteSecret.html) API. + + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. 
+ + ## Output Parameters + * Remediation.Output + + ## Documentation Links + * [AWS FSBP v1.0.0 SecretsManager.3](https://docs.aws.amazon.com/securityhub/latest/userguide/secretsmanager-controls.html#secretsmanager-3) +schemaVersion: '0.3' +assumeRole: '{{ AutomationAssumeRole }}' +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + Finding: + type: StringMap + description: The input from the Orchestrator Step function for the SecretsManager.3 finding + RemediationRoleName: + type: 'String' + default: 'SO0111-RemoveUnusedSecret' + allowedPattern: '^[\w+=,.@-]+$' + +outputs: + - ParseInput.AffectedObject + - Remediation.Output + +mainSteps: + - name: ParseInput + action: 'aws:executeScript' + outputs: + - Name: FindingId + Selector: $.Payload.finding.Id + Type: String + - Name: ProductArn + Selector: $.Payload.finding.ProductArn + Type: String + - Name: AffectedObject + Selector: $.Payload.object + Type: StringMap + - Name: SecretARN + Selector: $.Payload.resource_id + Type: String + - Name: RemediationRegion + Selector: $.Payload.resource_region + Type: String + - Name: RemediationAccount + Selector: $.Payload.account_id + Type: String + - Name: SecHubInputParams + Selector: $.Payload.input_params + Type: StringMap + inputs: + InputPayload: + Finding: '{{Finding}}' + parse_id_pattern: '' + expected_control_id: + - 'SecretsManager.3' + Runtime: python3.8 + Handler: parse_event + Script: |- + %%SCRIPT=common/parse_input.py%% + + - name: GetInputParams + action: 'aws:executeScript' + outputs: + - Name: unusedForDays + Selector: $.Payload.unusedForDays + Type: Integer + inputs: + InputPayload: + SecHubInputParams: '{{ParseInput.SecHubInputParams}}' + DefaultParams: { + "unusedForDays": 90 + } + expected_control_id: [ 'SecretsManager.3' ] + Runtime: python3.8 + Handler: 
get_input_params + Script: |- + %%SCRIPT=common/get_input_params.py%% + + - name: Remediation + action: 'aws:executeAutomation' + inputs: + DocumentName: ASR-RemoveUnusedSecret + TargetLocations: + - Accounts: [ '{{ParseInput.RemediationAccount}}' ] + Regions: [ '{{ParseInput.RemediationRegion}}' ] + ExecutionRoleName: '{{ RemediationRoleName }}' + RuntimeParameters: + AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/{{RemediationRoleName}}' + SecretARN: '{{ParseInput.SecretARN}}' + UnusedForDays: '{{GetInputParams.unusedForDays}}' + + - name: UpdateFinding + action: 'aws:executeAwsApi' + inputs: + Service: securityhub + Api: BatchUpdateFindings + FindingIdentifiers: + - Id: '{{ParseInput.FindingId}}' + ProductArn: '{{ParseInput.ProductArn}}' + Note: + Text: 'Removed the unused secret.' + UpdatedBy: 'ASR-AFSBP_1.0.0_SecretsManager.3' + Workflow: + Status: RESOLVED + description: Update finding diff --git a/source/playbooks/AFSBP/ssmdocs/AFSBP_SecretsManager.4.yaml b/source/playbooks/AFSBP/ssmdocs/AFSBP_SecretsManager.4.yaml new file mode 100644 index 00000000..480a9956 --- /dev/null +++ b/source/playbooks/AFSBP/ssmdocs/AFSBP_SecretsManager.4.yaml @@ -0,0 +1,120 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-AFSBP_1.0.0_SecretsManager.4 + + ## What does this document do? + This document rotates a secret and sets its rotation period to 90 days. + + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. 
+ + ## Output Parameters + * Remediation.Output + + ## Documentation Links + * [AWS FSBP v1.0.0 SecretsManager.4](https://docs.aws.amazon.com/securityhub/latest/userguide/secretsmanager-controls.html#secretsmanager-4) +schemaVersion: '0.3' +assumeRole: '{{ AutomationAssumeRole }}' +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + Finding: + type: StringMap + description: The input from the Orchestrator Step function for the SecretsManager.4 finding + RemediationRoleName: + type: 'String' + default: 'SO0111-UpdateSecretRotationPeriod' + allowedPattern: '^[\w+=,.@-]+$' + +outputs: + - ParseInput.AffectedObject + - Remediation.Output + +mainSteps: + - + name: ParseInput + action: 'aws:executeScript' + outputs: + - Name: FindingId + Selector: $.Payload.finding.Id + Type: String + - Name: ProductArn + Selector: $.Payload.finding.ProductArn + Type: String + - Name: AffectedObject + Selector: $.Payload.object + Type: StringMap + - Name: SecretARN + Selector: $.Payload.resource_id + Type: String + - Name: RemediationRegion + Selector: $.Payload.resource_region + Type: String + - Name: RemediationAccount + Selector: $.Payload.account_id + Type: String + - Name: SecHubInputParams + Selector: $.Payload.input_params + Type: StringMap + inputs: + InputPayload: + Finding: '{{Finding}}' + parse_id_pattern: '' + expected_control_id: + - 'SecretsManager.4' + Runtime: python3.8 + Handler: parse_event + Script: |- + %%SCRIPT=common/parse_input.py%% + - + name: GetInputParams + action: 'aws:executeScript' + outputs: + - Name: maxDaysSinceRotation + Selector: $.Payload.maxDaysSinceRotation + Type: Integer + inputs: + InputPayload: + SecHubInputParams: '{{ParseInput.SecHubInputParams}}' + DefaultParams: { + "maxDaysSinceRotation": 90 + } + expected_control_id: [ 'SecretsManager.4' ] + Runtime: 
python3.8 + Handler: get_input_params + Script: |- + %%SCRIPT=common/get_input_params.py%% + - + name: Remediation + action: 'aws:executeAutomation' + inputs: + DocumentName: ASR-UpdateSecretRotationPeriod + TargetLocations: + - Accounts: [ '{{ParseInput.RemediationAccount}}' ] + Regions: [ '{{ParseInput.RemediationRegion}}' ] + ExecutionRoleName: '{{ RemediationRoleName }}' + RuntimeParameters: + AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/{{RemediationRoleName}}' + SecretARN: '{{ParseInput.SecretARN}}' + MaxDaysSinceRotation: '{{GetInputParams.maxDaysSinceRotation}}' + - + name: UpdateFinding + action: 'aws:executeAwsApi' + inputs: + Service: securityhub + Api: BatchUpdateFindings + FindingIdentifiers: + - Id: '{{ParseInput.FindingId}}' + ProductArn: '{{ParseInput.ProductArn}}' + Note: + Text: 'Rotated secret and set rotation schedule to 90 days.' + UpdatedBy: 'ASR-AFSBP_1.0.0_SecretsManager.4' + Workflow: + Status: RESOLVED + description: Update finding diff --git a/source/playbooks/AFSBP/ssmdocs/scripts/deserializeApiList.py b/source/playbooks/AFSBP/ssmdocs/scripts/deserializeApiList.py index aa2b022f..316784e4 100644 --- a/source/playbooks/AFSBP/ssmdocs/scripts/deserializeApiList.py +++ b/source/playbooks/AFSBP/ssmdocs/scripts/deserializeApiList.py @@ -2,13 +2,18 @@ # SPDX-License-Identifier: Apache-2.0 import json + def runbook_handler(event, _): try: - deserialized = json.loads(event['SerializedList']) - if 'blacklistedActionPattern' in deserialized: - return deserialized['blacklistedActionPattern'] # Returns comma-delimited list in a string + deserialized = json.loads(event["SerializedList"]) + if "blacklistedActionPattern" in deserialized: + return deserialized[ + "blacklistedActionPattern" + ] # Returns comma-delimited list in a string else: - exit('Missing blacklistedActionPattern in AWS Config data') + exit("Missing blacklistedActionPattern in AWS Config data") except Exception as e: print(e) - exit('Failed 
getting comma-delimited string list of sensitive API calls input data') + exit( + "Failed getting comma-delimited string list of sensitive API calls input data" + ) diff --git a/source/playbooks/AFSBP/ssmdocs/scripts/test/test_s3-6_deserialize_api_list.py b/source/playbooks/AFSBP/ssmdocs/scripts/test/test_s3-6_deserialize_api_list.py index 2b85f7da..e3730884 100644 --- a/source/playbooks/AFSBP/ssmdocs/scripts/test/test_s3-6_deserialize_api_list.py +++ b/source/playbooks/AFSBP/ssmdocs/scripts/test/test_s3-6_deserialize_api_list.py @@ -1,15 +1,17 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import pytest import deserializeApiList as script + def event(): return { - "SerializedList": "{\"blacklistedActionPattern\":\"s3:DeleteBucketPolicy,s3:PutBucketAcl,s3:PutBucketPolicy,s3:PutObjectAcl,s3:PutEncryptionConfiguration\"}" + "SerializedList": '{"blacklistedActionPattern":"s3:DeleteBucketPolicy,s3:PutBucketAcl,s3:PutBucketPolicy,s3:PutObjectAcl,s3:PutEncryptionConfiguration"}' } + def expected(): return "s3:DeleteBucketPolicy,s3:PutBucketAcl,s3:PutBucketPolicy,s3:PutObjectAcl,s3:PutEncryptionConfiguration" + def test_extract_list(): assert script.runbook_handler(event(), {}) == expected() diff --git a/source/playbooks/AFSBP/support.txt b/source/playbooks/AFSBP/support.txt index 78889ec6..9d4c4c07 100644 --- a/source/playbooks/AFSBP/support.txt +++ b/source/playbooks/AFSBP/support.txt @@ -1,3 +1,3 @@ -AFSBP v1.0.0 Remediation Playbook - Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the 'License'); You may not use this product template except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0. +AWS FSBP v1.0.0 Remediation Playbook - Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
Licensed under the Apache License, Version 2.0 (the 'License'); You may not use this product template except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0. Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. diff --git a/source/playbooks/AFSBP/test/__snapshots__/afsbp_stack.test.ts.snap b/source/playbooks/AFSBP/test/__snapshots__/afsbp_stack.test.ts.snap index 75a3bcf2..13127763 100644 --- a/source/playbooks/AFSBP/test/__snapshots__/afsbp_stack.test.ts.snap +++ b/source/playbooks/AFSBP/test/__snapshots__/afsbp_stack.test.ts.snap @@ -84,7 +84,7 @@ This document changes all public EC2 snapshots to private * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. ## Documentation Links -* [AFSBP EC2.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-1) +* [AWS FSBP EC2.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-1) ", "mainSteps": [ { @@ -102,106 +102,109 @@ This document changes all public EC2 snapshots to private "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -211,101 +214,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, }", }, "isEnd": false, @@ -419,7 +463,7 @@ function. The remediation is to remove the SID of the public policy. * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
## Documentation Links -* [AFSBP Lambda.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-lambda-1) +* [AWS FSBP Lambda.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-lambda-1) ", "mainSteps": [ { @@ -436,106 +480,109 @@ function. The remediation is to remove the SID of the public policy. "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + 
"cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - 
self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -545,101 +592,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] 
self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not 
re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. 
- self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 
1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, }", }, "name": "ParseInput", @@ -768,7 +856,7 @@ This document changes public RDS snapshot to private * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
## Documentation Links -* [AFSBP RDS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-1) +* [AWS FSBP RDS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-1) ", "mainSteps": [ { @@ -780,112 +868,115 @@ This document changes public RDS snapshot to private "expected_control_id": [ "RDS.1", ], - "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:(cluster-snapshot|snapshot):([a-zA-Z](?:[0-9a-zA-Z]+-)*[0-9a-zA-Z]+)$", + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:(cluster-snapshot|snapshot):([a-zA-Z][0-9a-zA-Z]*(?:-[0-9a-zA-Z]+)*)$", "resource_index": 2, }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - 
print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - 
exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -895,101 +986,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = 
self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and 
self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. 
- self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 
1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, }", }, "name": "ParseInput", @@ -1119,7 +1251,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "897540159cc11ecdc5d175eee99cc91041790b69ca60e8f6f1ae5cd3b72084be", + "DocumentPropertiesHash": "b02d58f4b8b5dbdfd43ae5fb533c84039059d469bff02db8c1af802e2ce6eea2", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -1136,7 +1268,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "897540159cc11ecdc5d175eee99cc91041790b69ca60e8f6f1ae5cd3b72084be", + "DocumentPropertiesHash": "b02d58f4b8b5dbdfd43ae5fb533c84039059d469bff02db8c1af802e2ce6eea2", "ServiceToken": { "Ref": 
"WaitProviderServiceToken", }, diff --git a/source/playbooks/CIS120/ssmdocs/CIS_2.7.yaml b/source/playbooks/CIS120/ssmdocs/CIS_2.7.yaml index bd0524ee..1e6ad162 100644 --- a/source/playbooks/CIS120/ssmdocs/CIS_2.7.yaml +++ b/source/playbooks/CIS120/ssmdocs/CIS_2.7.yaml @@ -30,6 +30,10 @@ parameters: default: >- {{ssm:/Solutions/SO0111/CMK_REMEDIATION_ARN}} allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:(?:(?:alias/[A-Za-z0-9/-_])|(?:key/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})))$' + RemediationRoleName: + type: String + default: "SO0111-EnableCloudTrailEncryption" + allowedPattern: '^[\w+=,.@-]+$' mainSteps: - @@ -51,6 +55,12 @@ mainSteps: - Name: TrailRegion Selector: $.Payload.resource_region Type: String + - Name: RemediationRegion + Selector: $.Payload.resource_region + Type: String + - Name: RemediationAccount + Selector: $.Payload.account_id + Type: String inputs: InputPayload: Finding: '{{Finding}}' @@ -67,11 +77,14 @@ mainSteps: action: 'aws:executeAutomation' inputs: DocumentName: ASR-EnableCloudTrailEncryption + TargetLocations: + - Accounts: [ '{{ParseInput.RemediationAccount}}' ] + Regions: [ '{{ParseInput.RemediationRegion}}' ] + ExecutionRoleName: '{{RemediationRoleName}}' RuntimeParameters: TrailRegion: '{{ParseInput.TrailRegion}}' TrailArn: '{{ParseInput.TrailArn}}' AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/SO0111-EnableCloudTrailEncryption' - KMSKeyArn: '{{KMSKeyArn}}' - name: UpdateFinding action: 'aws:executeAwsApi' diff --git a/source/playbooks/CIS120/test/__snapshots__/cis_stack.test.ts.snap b/source/playbooks/CIS120/test/__snapshots__/cis_stack.test.ts.snap index 42007b59..f7dd9c2f 100644 --- a/source/playbooks/CIS120/test/__snapshots__/cis_stack.test.ts.snap +++ b/source/playbooks/CIS120/test/__snapshots__/cis_stack.test.ts.snap @@ -484,106 +484,109 @@ This document ensures that credentials unused for 90 days or greater are disable "Runtime": 
"python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None 
try: configsvc = connect_to_config(boto_config) - config_rule = configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): 
match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -593,101 +596,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = 
check_for_region.group(1) - def __init__(self, finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = 
False - self.invalid_finding_reason = f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + 
self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, }", }, "isEnd": false, @@ -832,106 +876,109 @@ This document establishes a default password policy. "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -941,101 +988,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, }", }, "isEnd": false, @@ -1162,106 +1250,109 @@ Note: this remediation will create a NEW trail. "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -1271,101 +1362,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, }", }, "isEnd": false, @@ -1469,7 +1601,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "c275c0d9e95945c9a2e73ba060457a2ea175ae65c3cb302192fc92a2c23d215f", + "DocumentPropertiesHash": 
"8d6bbace1b02b19cfe1e0b6cfa73ca2612d705b5c6c1e8c6a7fbf90d91d9232b", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -1486,7 +1618,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "c275c0d9e95945c9a2e73ba060457a2ea175ae65c3cb302192fc92a2c23d215f", + "DocumentPropertiesHash": "8d6bbace1b02b19cfe1e0b6cfa73ca2612d705b5c6c1e8c6a7fbf90d91d9232b", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, diff --git a/source/playbooks/CIS140/lib/cis140_playbook-construct.ts b/source/playbooks/CIS140/lib/cis140_playbook-construct.ts index c9e2e50c..6125d602 100644 --- a/source/playbooks/CIS140/lib/cis140_playbook-construct.ts +++ b/source/playbooks/CIS140/lib/cis140_playbook-construct.ts @@ -36,7 +36,7 @@ export class CIS140PlaybookMemberStack extends Stack { const waitProvider = WaitProvider.fromServiceToken( this, 'WaitProvider', - waitProviderServiceTokenParam.valueAsString + waitProviderServiceTokenParam.valueAsString, ); Aspects.of(this).add(new SsmDocRateLimit(waitProvider)); diff --git a/source/playbooks/CIS140/test/__snapshots__/cis_stack.test.ts.snap b/source/playbooks/CIS140/test/__snapshots__/cis_stack.test.ts.snap index 34468f0f..9b9bbacb 100644 --- a/source/playbooks/CIS140/test/__snapshots__/cis_stack.test.ts.snap +++ b/source/playbooks/CIS140/test/__snapshots__/cis_stack.test.ts.snap @@ -58,18 +58,7 @@ exports[`default stack 1`] = ` ], }, "GeneratorId": [ - { - "Fn::Join": [ - "", - [ - "arn:", - { - "Ref": "AWS::Partition", - }, - ":securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.4.0/rule/1.1", - ], - ], - }, + "cis-aws-foundations-benchmark/v/1.4.0/1.1", ], "RecordState": [ "ACTIVE", @@ -162,18 +151,7 @@ exports[`default stack 1`] = ` ], }, "GeneratorId": [ - { - "Fn::Join": [ - "", - [ - "arn:", - { - "Ref": "AWS::Partition", - }, - ":securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.4.0/rule/1.2", - ], - ], - }, + 
"cis-aws-foundations-benchmark/v/1.4.0/1.2", ], "RecordState": [ "ACTIVE", @@ -266,18 +244,7 @@ exports[`default stack 1`] = ` ], }, "GeneratorId": [ - { - "Fn::Join": [ - "", - [ - "arn:", - { - "Ref": "AWS::Partition", - }, - ":securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.4.0/rule/1.3", - ], - ], - }, + "cis-aws-foundations-benchmark/v/1.4.0/1.3", ], "RecordState": [ "ACTIVE", @@ -484,106 +451,109 @@ This document ensures that credentials unused for 90 days or greater are disable "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 
'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 
'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -593,101 +563,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + 
self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) 
- if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. 
- self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 
1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, }", }, "isEnd": false, @@ -832,106 +843,109 @@ This document establishes a default password policy. "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -941,101 +955,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, }", }, "isEnd": false, @@ -1162,106 +1217,109 @@ Note: this remediation will create a NEW trail. "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -1271,101 +1329,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, }", }, "isEnd": false, @@ -1469,7 +1568,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "27701e827ac749863e3438750a89ec6469824e498d539aaacc500caef62d39e1", + "DocumentPropertiesHash": 
"29806936235bf5f53ae2b2c71b0e9b1da3247c58d35e440690e31f6c8a4ddf5d", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -1486,7 +1585,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "27701e827ac749863e3438750a89ec6469824e498d539aaacc500caef62d39e1", + "DocumentPropertiesHash": "29806936235bf5f53ae2b2c71b0e9b1da3247c58d35e440690e31f6c8a4ddf5d", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, diff --git a/source/playbooks/NEWPLAYBOOK/ssmdocs/scripts/test/test_parse_event.py b/source/playbooks/NEWPLAYBOOK/ssmdocs/scripts/test/test_parse_event.py deleted file mode 100644 index 0307cdb6..00000000 --- a/source/playbooks/NEWPLAYBOOK/ssmdocs/scripts/test/test_parse_event.py +++ /dev/null @@ -1,177 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 -import pytest - -from newplaybook_parse_input import parse_event -def event(): - return { - 'expected_control_id': '2.3', - 'parse_id_pattern': '^arn:(?:aws|aws-cn|aws-us-gov):s3:::([A-Za-z0-9.-]{3,63})$', - 'Finding': { - "ProductArn": "arn:aws:securityhub:us-east-2::product/aws/securityhub", - "Types": [ - "Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" - ], - "Description": "Details: 2.3 Ensure the S3 bucket used to store CloudTrail logs is not publicly accessible", - "SchemaVersion": "2018-10-08", - "Compliance": { - "Status": "WARNING", - "StatusReasons": [ - { - "Description": "The finding is in a WARNING state, because the S3 Bucket associated with this rule is in a different region/account. 
This rule does not support cross-region/cross-account checks, so it is recommended to disable this control in this region/account and only run it in the region/account where the resource is located.", - "ReasonCode": "S3_BUCKET_CROSS_ACCOUNT_CROSS_REGION" - } - ] - }, - "GeneratorId": "arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/2.3", - "FirstObservedAt": "2020-05-20T05:02:44.203Z", - "CreatedAt": "2020-05-20T05:02:44.203Z", - "RecordState": "ACTIVE", - "Title": "2.3 Ensure the S3 bucket used to store CloudTrail logs is not publicly accessible", - "Workflow": { - "Status": "NEW" - }, - "LastObservedAt": "2020-06-17T13:01:35.884Z", - "Severity": { - "Normalized": 90, - "Label": "CRITICAL", - "Product": 90, - "Original": "CRITICAL" - }, - "UpdatedAt": "2020-06-17T13:01:25.561Z", - "WorkflowState": "NEW", - "ProductFields": { - "StandardsGuideArn": "arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0", - "StandardsGuideSubscriptionArn": "arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0", - "RuleId": "2.3", - "RecommendationUrl": "https://docs.aws.amazon.com/console/securityhub/standards-cis-2.3/remediation", - "RelatedAWSResources:0/name": "securityhub-s3-bucket-public-read-prohibited-4414615a", - "RelatedAWSResources:0/type": "AWS::Config::ConfigRule", - "RelatedAWSResources:1/name": "securityhub-s3-bucket-public-write-prohibited-f104fcda", - "RelatedAWSResources:1/type": "AWS::Config::ConfigRule", - "StandardsControlArn": "arn:aws:securityhub:us-east-2:111111111111:control/cis-aws-foundations-benchmark/v/1.2.0/2.3", - "aws/securityhub/SeverityLabel": "CRITICAL", - "aws/securityhub/ProductName": "Security Hub", - "aws/securityhub/CompanyName": "AWS", - "aws/securityhub/annotation": "The finding is in a WARNING state, because the S3 Bucket associated with this rule is in a different region/account. 
This rule does not support cross-region/cross-account checks, so it is recommended to disable this control in this region/account and only run it in the region/account where the resource is located.", - "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-2::product/aws/securityhub/arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/2.3/finding/f51c716c-b33c-4949-b748-2ffd22bdceec" - }, - "AwsAccountId": "111111111111", - "Id": "arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/2.3/finding/f51c716c-b33c-4949-b748-2ffd22bdceec", - "Remediation": { - "Recommendation": { - "Text": "For directions on how to fix this issue, please consult the AWS Security Hub CIS documentation.", - "Url": "https://docs.aws.amazon.com/console/securityhub/standards-cis-2.3/remediation" - } - }, - "Resources": [ - { - "Partition": "aws", - "Type": "AwsS3Bucket", - "Region": "us-east-2", - "Id": "arn:aws:s3:::cloudtrail-awslogs-111111111111-kjfskljdfl" - } - ] - } - } - -def expected(): - return { - "account_id": '111111111111', - "resource_id": 'cloudtrail-awslogs-111111111111-kjfskljdfl', - "finding_id": 'arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/2.3/finding/f51c716c-b33c-4949-b748-2ffd22bdceec', - "product_arn": 'arn:aws:securityhub:us-east-2::product/aws/securityhub', - "control_id": '2.3', - "object": { - "Type": 'AwsS3Bucket', - "Id": 'cloudtrail-awslogs-111111111111-kjfskljdfl', - "OutputKey": 'Remediation.Output' - }, - "matches": [ "cloudtrail-awslogs-111111111111-kjfskljdfl" ] - } - -def test_parse_event(): - parsed_event = parse_event(event(), {}) - assert parsed_event == expected() - -def test_parse_event_multimatch(): - expected_result = expected() - expected_result['matches'] = [ - "aws", - "cloudtrail-awslogs-111111111111-kjfskljdfl" - ] - test_event = event() - test_event['resource_index'] = 2 - test_event['parse_id_pattern'] = 
'^arn:((?:aws|aws-cn|aws-us-gov)):s3:::([A-Za-z0-9.-]{3,63})$' - parsed_event = parse_event(test_event, {}) - assert parsed_event == expected_result - -def test_bad_finding_id(): - test_event = event() - test_event['Finding']['Id'] = "badvalue" - with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) - assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Finding Id is invalid: badvalue' - -def test_bad_control_id(): - test_event = event() - test_event['Finding']['Id'] = "arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0//finding/f51c716c-b33c-4949-b748-2ffd22bdceec" - with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) - assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Finding Id is invalid: arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0//finding/f51c716c-b33c-4949-b748-2ffd22bdceec - missing Control Id' - -def test_control_id_nomatch(): - test_event = event() - test_event['Finding']['Id'] = "arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/2.4/finding/f51c716c-b33c-4949-b748-2ffd22bdceec" - with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) - assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Control Id from input (2.4) does not match 2.3' - -def test_bad_account_id(): - test_event = event() - test_event['Finding']['AwsAccountId'] = "1234123412345" - with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) - assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: AwsAccountId is invalid: 1234123412345' - -def test_bad_productarn(): - test_event = event() - test_event['Finding']['ProductArn'] = "badvalue" - 
with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) - assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: ProductArn is invalid: badvalue' - -def test_bad_resource_match(): - test_event = event() - test_event['parse_id_pattern'] = '^arn:(?:aws|aws-cn|aws-us-gov):logs:::([A-Za-z0-9.-]{3,63})$' - with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) - assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Invalid resource Id arn:aws:s3:::cloudtrail-awslogs-111111111111-kjfskljdfl' - -def test_no_resource_pattern(): - test_event = event() - expected_result = expected() - - test_event['parse_id_pattern'] = '' - expected_result['resource_id'] = 'arn:aws:s3:::cloudtrail-awslogs-111111111111-kjfskljdfl' - expected_result['matches'] = [] - expected_result['object']['Id'] = expected_result['resource_id'] - parsed_event = parse_event(test_event, {}) - assert parsed_event == expected_result - -def test_no_resource_pattern_no_resource_id(): - test_event = event() - - test_event['parse_id_pattern'] = '' - test_event['Finding']['Resources'][0]['Id'] = '' - - with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) - assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Resource Id is missing from the finding json Resources (Id)' diff --git a/source/playbooks/NEWPLAYBOOK/test/__snapshots__/newplaybook_stack.test.ts.snap b/source/playbooks/NEWPLAYBOOK/test/__snapshots__/newplaybook_stack.test.ts.snap index d9a0422f..52b32276 100644 --- a/source/playbooks/NEWPLAYBOOK/test/__snapshots__/newplaybook_stack.test.ts.snap +++ b/source/playbooks/NEWPLAYBOOK/test/__snapshots__/newplaybook_stack.test.ts.snap @@ -410,106 +410,109 @@ This document enables \`Enhanced Monitoring\` on a given Amazon RDS instance by "Runtime": "python3.8", "Script": "# 
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = 
connect_to_config(boto_config) - config_rule = configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = 
re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -519,101 +522,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def 
__init__(self, finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - 
self.invalid_finding_reason = f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( 
+ "Resource Id is missing from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, }", }, "isEnd": false, @@ -757,7 +801,7 @@ def verify_remediation(event, context): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "59676fa5d188027bd00a93038c2c1e278186320cd9e86edd24c7fb7c89adb88a", + 
"DocumentPropertiesHash": "ba760c2ec83ae337d260ae23a7ebbc20181cddeaba0c5b8efc5addfbeb101fda", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -774,7 +818,7 @@ def verify_remediation(event, context): "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "59676fa5d188027bd00a93038c2c1e278186320cd9e86edd24c7fb7c89adb88a", + "DocumentPropertiesHash": "ba760c2ec83ae337d260ae23a7ebbc20181cddeaba0c5b8efc5addfbeb101fda", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, diff --git a/source/playbooks/NIST80053/bin/nist80053.ts b/source/playbooks/NIST80053/bin/nist80053.ts new file mode 100644 index 00000000..a9d501ba --- /dev/null +++ b/source/playbooks/NIST80053/bin/nist80053.ts @@ -0,0 +1,115 @@ +#!/usr/bin/env node +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { NIST80053PlaybookMemberStack } from '../lib/NIST80053_playbook-construct'; +import { App, Aspects, DefaultStackSynthesizer } from 'aws-cdk-lib'; +import { AwsSolutionsChecks } from 'cdk-nag'; +import 'source-map-support/register'; +import { PlaybookPrimaryStack, IControl } from '../../../lib/sharrplaybook-construct'; + +// SOLUTION_* - set by solution_env.sh +const SOLUTION_ID = process.env['SOLUTION_ID'] || 'undefined'; +const SOLUTION_NAME = process.env['SOLUTION_NAME'] || 'undefined'; +// DIST_* - set by build-s3-dist.sh +const DIST_VERSION = process.env['DIST_VERSION'] || '%%VERSION%%'; +const DIST_OUTPUT_BUCKET = process.env['DIST_OUTPUT_BUCKET'] || '%%BUCKET%%'; +const DIST_SOLUTION_NAME = process.env['DIST_SOLUTION_NAME'] || '%%SOLUTION%%'; + +const standardShortName = 'NIST80053R5'; +const standardLongName = 'nist-800-53'; +const standardVersion = '5.0.0'; // DO NOT INCLUDE 'V' + +const app = new App(); +Aspects.of(app).add(new AwsSolutionsChecks()); + +// Creates one rule per control Id. 
The Step Function determines what document to run based on +// Security Standard and Control Id. See cis-member-stack +const remediations: IControl[] = [ + { control: 'AutoScaling.1' }, + { control: 'CloudFormation.1' }, + { control: 'CloudFront.1' }, + { control: 'CloudFront.12' }, + { control: 'CloudTrail.1' }, + { control: 'CloudTrail.2' }, + { control: 'CloudTrail.4' }, + { control: 'CloudTrail.5' }, + { control: 'CodeBuild.2' }, + { control: 'CodeBuild.5' }, + { control: 'Config.1' }, + { control: 'EC2.1' }, + { control: 'EC2.2' }, + { control: 'EC2.4' }, + { control: 'EC2.6' }, + { control: 'EC2.7' }, + { control: 'EC2.8' }, + { control: 'EC2.13' }, + { control: 'EC2.15' }, + { control: 'EC2.18' }, + { control: 'EC2.19' }, + { control: 'EC2.23' }, + { control: 'IAM.3' }, + { control: 'IAM.7' }, + { control: 'IAM.8' }, + { control: 'KMS.4' }, + { control: 'Lambda.1' }, + { control: 'RDS.1' }, + { control: 'RDS.2' }, + { control: 'RDS.4' }, + { control: 'RDS.5' }, + { control: 'RDS.6' }, + { control: 'RDS.7' }, + { control: 'RDS.8' }, + { control: 'RDS.13' }, + { control: 'RDS.16' }, + { control: 'Redshift.1' }, + { control: 'Redshift.3' }, + { control: 'Redshift.4' }, + { control: 'Redshift.6' }, + { control: 'S3.1' }, + { control: 'S3.2' }, + { control: 'S3.3', executes: 'S3.2' }, + { control: 'S3.4' }, + { control: 'S3.5' }, + { control: 'S3.6' }, + { control: 'S3.8', executes: 'S3.2' }, + { control: 'S3.9' }, + { control: 'S3.11' }, + { control: 'S3.13' }, + { control: 'SecretsManager.1' }, + { control: 'SecretsManager.3' }, + { control: 'SecretsManager.4' }, + { control: 'SNS.1' }, + { control: 'SNS.2' }, + { control: 'SQS.1' }, + { control: 'SSM.4' }, +]; + +const adminStack = new PlaybookPrimaryStack(app, 'NIST80053Stack', { + analyticsReporting: false, // CDK::Metadata breaks StackSets in some regions + synthesizer: new DefaultStackSynthesizer({ generateBootstrapVersionRule: false }), + description: `(${SOLUTION_ID}P) ${SOLUTION_NAME} 
${standardShortName} ${standardVersion} Compliance Pack - Admin Account, ${DIST_VERSION}`, + solutionId: SOLUTION_ID, + solutionVersion: DIST_VERSION, + solutionDistBucket: DIST_OUTPUT_BUCKET, + solutionDistName: DIST_SOLUTION_NAME, + remediations: remediations, + securityStandardLongName: standardLongName, + securityStandard: standardShortName, + securityStandardVersion: standardVersion, +}); + +const memberStack = new NIST80053PlaybookMemberStack(app, 'NIST80053MemberStack', { + analyticsReporting: false, // CDK::Metadata breaks StackSets in some regions + synthesizer: new DefaultStackSynthesizer({ generateBootstrapVersionRule: false }), + description: `(${SOLUTION_ID}M) ${SOLUTION_NAME} ${standardShortName} ${standardVersion} Compliance Pack - Member Account, ${DIST_VERSION}`, + solutionId: SOLUTION_ID, + solutionVersion: DIST_VERSION, + solutionDistBucket: DIST_OUTPUT_BUCKET, + securityStandard: standardShortName, + securityStandardVersion: standardVersion, + securityStandardLongName: standardLongName, + remediations: remediations, +}); + +adminStack.templateOptions.templateFormatVersion = '2010-09-09'; +memberStack.templateOptions.templateFormatVersion = '2010-09-09'; diff --git a/source/playbooks/NIST80053/cdk.json b/source/playbooks/NIST80053/cdk.json new file mode 100644 index 00000000..8a25cba6 --- /dev/null +++ b/source/playbooks/NIST80053/cdk.json @@ -0,0 +1,7 @@ +{ + "app": "npx ts-node bin/nist80053.ts", + "versionReporting": false, + "context": { + "aws-cdk:enableDiffNoFail": "true" + } +} \ No newline at end of file diff --git a/source/playbooks/NIST80053/description.txt b/source/playbooks/NIST80053/description.txt new file mode 100644 index 00000000..893fc7bf --- /dev/null +++ b/source/playbooks/NIST80053/description.txt @@ -0,0 +1,3 @@ +Automated Response on AWS is an add-on solution that +enables AWS Security Hub customers to remediate security findings with a single click +using predefined response and remediation actions packaged in “Playbooks”. 
This Playbook, NIST80053, includes remediations for many of the NIST 800-53Rev5 Security Standard findings. \ No newline at end of file diff --git a/source/playbooks/NIST80053/lib/NIST80053_playbook-construct.ts b/source/playbooks/NIST80053/lib/NIST80053_playbook-construct.ts new file mode 100644 index 00000000..02cd8c1b --- /dev/null +++ b/source/playbooks/NIST80053/lib/NIST80053_playbook-construct.ts @@ -0,0 +1,62 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Stack, App, StackProps, CfnParameter, Aspects } from 'aws-cdk-lib'; +import { ControlRunbooks } from './control_runbooks-construct'; +import AdminAccountParam from '../../../lib/admin-account-param'; +import { Runtime } from 'aws-cdk-lib/aws-lambda'; +import { IControl } from '../../../lib/sharrplaybook-construct'; +import { WaitProvider } from '../../../lib/wait-provider'; +import SsmDocRateLimit from '../../../lib/ssm-doc-rate-limit'; + +export interface NIST80053PlaybookMemberStackProps extends StackProps { + solutionId: string; + solutionVersion: string; + solutionDistBucket: string; + securityStandard: string; + securityStandardVersion: string; + securityStandardLongName: string; + ssmdocs?: string; + commonScripts?: string; + remediations: IControl[]; +} + +export class NIST80053PlaybookMemberStack extends Stack { + constructor(scope: App, id: string, props: NIST80053PlaybookMemberStackProps) { + super(scope, id, props); + + // Not used, but required by top-level member stack + new AdminAccountParam(this, 'AdminAccountParameter'); + + const waitProviderServiceTokenParam = new CfnParameter(this, 'WaitProviderServiceToken'); + + const waitProvider = WaitProvider.fromServiceToken( + this, + 'WaitProvider', + waitProviderServiceTokenParam.valueAsString, + ); + + Aspects.of(this).add(new SsmDocRateLimit(waitProvider)); + + const controlRunbooks = new ControlRunbooks(this, 'ControlRunbooks', { + standardShortName: 
props.securityStandard, + standardLongName: props.securityStandardLongName, + standardVersion: props.securityStandardVersion, + runtimePython: Runtime.PYTHON_3_8, // Newest runtime for SSM automations + solutionId: props.solutionId, + solutionAcronym: 'ASR', + solutionVersion: props.solutionVersion, + }); + + // Make sure all known controls have runbooks + for (const remediation of props.remediations) { + // Skip remapped controls + if (remediation.executes && remediation.executes !== remediation.control) { + continue; + } + + if (!controlRunbooks.has(remediation.control)) { + throw new Error(`No control runbook implemented for ${remediation.control}`); + } + } + } +} diff --git a/source/playbooks/NIST80053/lib/control_runbooks-construct.ts b/source/playbooks/NIST80053/lib/control_runbooks-construct.ts new file mode 100644 index 00000000..a7ec1c54 --- /dev/null +++ b/source/playbooks/NIST80053/lib/control_runbooks-construct.ts @@ -0,0 +1,179 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Runtime } from 'aws-cdk-lib/aws-lambda'; +import { Construct } from 'constructs'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { CfnCondition, CfnParameter, Fn } from 'aws-cdk-lib'; + +import * as autoscaling_1 from '../ssmdocs/NIST80053_AutoScaling.1'; +import * as cloudformation_1 from '../ssmdocs/NIST80053_CloudFormation.1'; +import * as cloudfront_1 from '../ssmdocs/NIST80053_CloudFront.1'; +import * as cloudfront_12 from '../ssmdocs/NIST80053_CloudFront.12'; +import * as cloudtrail_1 from '../ssmdocs/NIST80053_CloudTrail.1'; +import * as cloudtrail_2 from '../ssmdocs/NIST80053_CloudTrail.2'; +import * as cloudtrail_4 from '../ssmdocs/NIST80053_CloudTrail.4'; +import * as cloudtrail_5 from '../ssmdocs/NIST80053_CloudTrail.5'; +import * as codebuild_2 from '../ssmdocs/NIST80053_CodeBuild.2'; +import * as codebuild_5 from '../ssmdocs/NIST80053_CodeBuild.5'; +import * as config_1 from '../ssmdocs/NIST80053_Config.1'; +import * as ec2_1 from '../ssmdocs/NIST80053_EC2.1'; +import * as ec2_2 from '../ssmdocs/NIST80053_EC2.2'; +import * as ec2_4 from '../ssmdocs/NIST80053_EC2.4'; +import * as ec2_6 from '../ssmdocs/NIST80053_EC2.6'; +import * as ec2_7 from '../ssmdocs/NIST80053_EC2.7'; +import * as ec2_8 from '../ssmdocs/NIST80053_EC2.8'; +import * as ec2_13 from '../ssmdocs/NIST80053_EC2.13'; +import * as ec2_15 from '../ssmdocs/NIST80053_EC2.15'; +import * as ec2_18 from '../ssmdocs/NIST80053_EC2.18'; +import * as ec2_19 from '../ssmdocs/NIST80053_EC2.19'; +import * as ec2_23 from '../ssmdocs/NIST80053_EC2.23'; +import * as ecr_1 from '../ssmdocs/NIST80053_ECR.1'; +import * as guardduty_1 from '../ssmdocs/NIST80053_GuardDuty.1'; +import * as iam_3 from '../ssmdocs/NIST80053_IAM.3'; +import * as iam_7 from '../ssmdocs/NIST80053_IAM.7'; +import * as iam_8 from '../ssmdocs/NIST80053_IAM.8'; +import * as kms_4 from '../ssmdocs/NIST80053_KMS.4'; +import * as lambda_1 from 
'../ssmdocs/NIST80053_Lambda.1'; +import * as rds_1 from '../ssmdocs/NIST80053_RDS.1'; +import * as rds_2 from '../ssmdocs/NIST80053_RDS.2'; +import * as rds_4 from '../ssmdocs/NIST80053_RDS.4'; +import * as rds_5 from '../ssmdocs/NIST80053_RDS.5'; +import * as rds_6 from '../ssmdocs/NIST80053_RDS.6'; +import * as rds_7 from '../ssmdocs/NIST80053_RDS.7'; +import * as rds_8 from '../ssmdocs/NIST80053_RDS.8'; +import * as rds_13 from '../ssmdocs/NIST80053_RDS.13'; +import * as rds_16 from '../ssmdocs/NIST80053_RDS.16'; +import * as redshift_1 from '../ssmdocs/NIST80053_Redshift.1'; +import * as redshift_3 from '../ssmdocs/NIST80053_Redshift.3'; +import * as redshift_4 from '../ssmdocs/NIST80053_Redshift.4'; +import * as redshift_6 from '../ssmdocs/NIST80053_Redshift.6'; +import * as s3_1 from '../ssmdocs/NIST80053_S3.1'; +import * as s3_2 from '../ssmdocs/NIST80053_S3.2'; +import * as s3_4 from '../ssmdocs/NIST80053_S3.4'; +import * as s3_5 from '../ssmdocs/NIST80053_S3.5'; +import * as s3_6 from '../ssmdocs/NIST80053_S3.6'; +import * as s3_9 from '../ssmdocs/NIST80053_S3.9'; +import * as s3_11 from '../ssmdocs/NIST80053_S3.11'; +import * as s3_13 from '../ssmdocs/NIST80053_S3.13'; +import * as secretsmanager_1 from '../ssmdocs/NIST80053_SecretsManager.1'; +import * as secretsmanager_3 from '../ssmdocs/NIST80053_SecretsManager.3'; +import * as secretsmanager_4 from '../ssmdocs/NIST80053_SecretsManager.4'; +import * as sqs_1 from '../ssmdocs/NIST80053_SQS.1'; +import * as sns_1 from '../ssmdocs/NIST80053_SNS.1'; +import * as sns_2 from '../ssmdocs/NIST80053_SNS.2'; +import * as ssm_4 from '../ssmdocs/NIST80053_SSM.4'; + +export interface PlaybookProps { + standardShortName: string; + standardLongName: string; + standardVersion: string; + runtimePython: Runtime; + solutionId: string; + solutionAcronym: string; + solutionVersion: string; +} + +export class ControlRunbooks extends Construct { + protected readonly standardLongName: string; + protected readonly 
standardVersion: string; + protected controls: Set = new Set(); + + constructor(scope: Construct, id: string, props: PlaybookProps) { + super(scope, id); + + this.standardLongName = props.standardLongName; + this.standardVersion = props.standardVersion; + + this.add(autoscaling_1.createControlRunbook(this, 'AutoScaling.1', props)); + this.add(cloudformation_1.createControlRunbook(this, 'CloudFormation.1', props)); + this.add(cloudfront_1.createControlRunbook(this, 'CloudFront.1', props)); + this.add(cloudfront_12.createControlRunbook(this, 'CloudFront.12', props)); + this.add(cloudtrail_1.createControlRunbook(this, 'CloudTrail.1', props)); + this.add(cloudtrail_2.createControlRunbook(this, 'CloudTrail.2', props)); + this.add(cloudtrail_4.createControlRunbook(this, 'CloudTrail.4', props)); + this.add(cloudtrail_5.createControlRunbook(this, 'CloudTrail.5', props)); + this.add(codebuild_2.createControlRunbook(this, 'CodeBuild.2', props)); + this.add(codebuild_5.createControlRunbook(this, 'CodeBuild.5', props)); + this.add(config_1.createControlRunbook(this, 'Config.1', props)); + this.add(ec2_1.createControlRunbook(this, 'EC2.1', props)); + this.add(ec2_2.createControlRunbook(this, 'EC2.2', props)); + this.add(ec2_4.createControlRunbook(this, 'EC2.4', props)); + this.add(ec2_6.createControlRunbook(this, 'EC2.6', props)); + this.add(ec2_7.createControlRunbook(this, 'EC2.7', props)); + this.add(ec2_8.createControlRunbook(this, 'EC2.8', props)); + this.add(ec2_13.createControlRunbook(this, 'EC2.13', props)); + this.add(ec2_15.createControlRunbook(this, 'EC2.15', props)); + this.add(ec2_18.createControlRunbook(this, 'EC2.18', props)); + this.add(ec2_19.createControlRunbook(this, 'EC2.19', props)); + this.add(ec2_23.createControlRunbook(this, 'EC2.23', props)); + this.add(ecr_1.createControlRunbook(this, 'ECR.1', props)); + this.add(guardduty_1.createControlRunbook(this, 'GuardDuty.1', props)); + this.add(iam_3.createControlRunbook(this, 'IAM.3', props)); + 
this.add(iam_7.createControlRunbook(this, 'IAM.7', props)); + this.add(iam_8.createControlRunbook(this, 'IAM.8', props)); + this.add(kms_4.createControlRunbook(this, 'KMS.4', props)); + this.add(lambda_1.createControlRunbook(this, 'Lambda.1', props)); + this.add(rds_1.createControlRunbook(this, 'RDS.1', props)); + this.add(rds_2.createControlRunbook(this, 'RDS.2', props)); + this.add(rds_4.createControlRunbook(this, 'RDS.4', props)); + this.add(rds_5.createControlRunbook(this, 'RDS.5', props)); + this.add(rds_6.createControlRunbook(this, 'RDS.6', props)); + this.add(rds_7.createControlRunbook(this, 'RDS.7', props)); + this.add(rds_8.createControlRunbook(this, 'RDS.8', props)); + this.add(rds_13.createControlRunbook(this, 'RDS.13', props)); + this.add(rds_16.createControlRunbook(this, 'RDS.16', props)); + this.add(redshift_1.createControlRunbook(this, 'Redshift.1', props)); + this.add(redshift_3.createControlRunbook(this, 'Redshift.3', props)); + this.add(redshift_4.createControlRunbook(this, 'Redshift.4', props)); + this.add(redshift_6.createControlRunbook(this, 'Redshift.6', props)); + this.add(s3_1.createControlRunbook(this, 'S3.1', props)); + this.add(s3_2.createControlRunbook(this, 'S3.2', props)); + this.add(s3_4.createControlRunbook(this, 'S3.4', props)); + this.add(s3_5.createControlRunbook(this, 'S3.5', props)); + this.add(s3_6.createControlRunbook(this, 'S3.6', props)); + this.add(s3_9.createControlRunbook(this, 'S3.9', props)); + this.add(s3_11.createControlRunbook(this, 'S3.11', props)); + this.add(s3_13.createControlRunbook(this, 'S3.13', props)); + this.add(secretsmanager_1.createControlRunbook(this, 'SecretsManager.1', props)); + this.add(secretsmanager_3.createControlRunbook(this, 'SecretsManager.3', props)); + this.add(secretsmanager_4.createControlRunbook(this, 'SecretsManager.4', props)); + this.add(sns_1.createControlRunbook(this, 'SNS.1', props)); + this.add(sns_2.createControlRunbook(this, 'SNS.2', props)); + 
this.add(sqs_1.createControlRunbook(this, 'SQS.1', props)); + this.add(ssm_4.createControlRunbook(this, 'SSM.4', props)); + } + + protected add(document: ControlRunbookDocument) { + const controlId = document.getControlId(); + const enableParamDescription = this.getEnableParamDescription(controlId); + const enableParamValueAvailable = 'Available'; + const enableParam = new CfnParameter(this, `Enable ${controlId}`, { + type: 'String', + description: enableParamDescription, + default: enableParamValueAvailable, + allowedValues: [enableParamValueAvailable, 'NOT Available'], + }); + + const installSsmDoc = new CfnCondition(this, `Enable ${controlId} Condition`, { + expression: Fn.conditionEquals(enableParam, enableParamValueAvailable), + }); + + document.cfnDocument.cfnOptions.condition = installSsmDoc; + + this.controls.add(document.getControlId()); + } + + protected getEnableParamDescription(controlId: string) { + // eslint-disable-next-line prettier/prettier + return ( + `Enable/disable availability of remediation for ${this.standardLongName} version ` + + `${this.standardVersion} Control ${controlId} in Security Hub Console Custom Actions. If ` + + 'NOT Available the remediation cannot be triggered from the Security Hub console in the ' + + 'Security Hub Admin account.' + ); + } + + public has(controlId: string): boolean { + return this.controls.has(controlId); + } +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_AutoScaling.1.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_AutoScaling.1.ts new file mode 100644 index 00000000..37fb6936 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_AutoScaling.1.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableAutoScalingGroupELBHealthCheckDocument } from '../../SC/ssmdocs/SC_AutoScaling.1'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableAutoScalingGroupELBHealthCheckDocument(stage, id, { ...props, controlId: 'AutoScaling.1' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudFormation.1.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudFormation.1.ts new file mode 100644 index 00000000..043daf5a --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudFormation.1.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { ConfigureSNSTopicForStackDocument } from '../../SC/ssmdocs/SC_CloudFormation.1'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new ConfigureSNSTopicForStackDocument(stage, id, { ...props, controlId: 'CloudFormation.1' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudFront.1.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudFront.1.ts new file mode 100644 index 00000000..a0694b27 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudFront.1.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableCloudFrontDefaultRootObjectDocument } from '../../SC/ssmdocs/SC_CloudFront.1'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableCloudFrontDefaultRootObjectDocument(stage, id, { ...props, controlId: 'CloudFront.1' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudFront.12.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudFront.12.ts new file mode 100644 index 00000000..90f75d95 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudFront.12.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { SetCloudFrontOriginDomainDocument } from '../../SC/ssmdocs/SC_CloudFront.12'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new SetCloudFrontOriginDomainDocument(stage, id, { ...props, controlId: 'CloudFront.12' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudTrail.1.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudTrail.1.ts new file mode 100644 index 00000000..c5e73c42 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudTrail.1.ts @@ -0,0 +1,14 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { CreateCloudTrailMultiRegionTrailDocument } from '../../SC/ssmdocs/SC_CloudTrail.1'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new CreateCloudTrailMultiRegionTrailDocument(stage, id, { + ...props, + controlId: 'CloudTrail.1', + otherControlIds: ['CloudTrail.3'], + }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudTrail.2.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudTrail.2.ts new file mode 100644 index 00000000..7f12d373 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudTrail.2.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableCloudTrailEncryptionDocument } from '../../SC/ssmdocs/SC_CloudTrail.2'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableCloudTrailEncryptionDocument(stage, id, { ...props, controlId: 'CloudTrail.2' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudTrail.4.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudTrail.4.ts new file mode 100644 index 00000000..120db3ee --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudTrail.4.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableCloudTrailLogFileValidationDocument } from '../../SC/ssmdocs/SC_CloudTrail.4'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableCloudTrailLogFileValidationDocument(stage, id, { ...props, controlId: 'CloudTrail.4' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudTrail.5.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudTrail.5.ts new file mode 100644 index 00000000..8733b235 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_CloudTrail.5.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableCloudTrailToCloudWatchLoggingDocument } from '../../SC/ssmdocs/SC_CloudTrail.5'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableCloudTrailToCloudWatchLoggingDocument(stage, id, { ...props, controlId: 'CloudTrail.5' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_CodeBuild.2.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_CodeBuild.2.ts new file mode 100644 index 00000000..c7d385b7 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_CodeBuild.2.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { ReplaceCodeBuildClearTextCredentialsDocument } from '../../SC/ssmdocs/SC_CodeBuild.2'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new ReplaceCodeBuildClearTextCredentialsDocument(stage, id, { ...props, controlId: 'CodeBuild.2' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_CodeBuild.5.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_CodeBuild.5.ts new file mode 100644 index 00000000..944f0813 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_CodeBuild.5.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { RemoveCodeBuildPrivilegedModeDocument } from '../../SC/ssmdocs/SC_CodeBuild.5'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new RemoveCodeBuildPrivilegedModeDocument(stage, id, { ...props, controlId: 'CodeBuild.5' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_Config.1.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_Config.1.ts new file mode 100644 index 00000000..838f0b6e --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_Config.1.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableAWSConfigDocument } from '../../SC/ssmdocs/SC_Config.1'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableAWSConfigDocument(stage, id, { ...props, controlId: 'Config.1' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.1.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.1.ts new file mode 100644 index 00000000..b2c9fff5 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.1.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { MakeEBSSnapshotsPrivateDocument } from '../../SC/ssmdocs/SC_EC2.1'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new MakeEBSSnapshotsPrivateDocument(stage, id, { ...props, controlId: 'EC2.1' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.13.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.13.ts new file mode 100644 index 00000000..27242810 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.13.ts @@ -0,0 +1,14 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { DisablePublicAccessForSecurityGroupDocument } from '../../SC/ssmdocs/SC_EC2.13'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new DisablePublicAccessForSecurityGroupDocument(stage, id, { + ...props, + controlId: 'EC2.13', + otherControlIds: ['EC2.14'], + }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.15.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.15.ts new file mode 100644 index 00000000..8b39213d --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.15.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { DisablePublicIPAutoAssignDocument } from '../../SC/ssmdocs/SC_EC2.15'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new DisablePublicIPAutoAssignDocument(stage, id, { ...props, controlId: 'EC2.15' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.18.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.18.ts new file mode 100644 index 00000000..b321e6e4 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.18.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { RevokeUnauthorizedInboundRulesDocument } from '../../SC/ssmdocs/SC_EC2.18'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new RevokeUnauthorizedInboundRulesDocument(stage, id, { ...props, controlId: 'EC2.18' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.19.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.19.ts new file mode 100644 index 00000000..465cd714 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.19.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { DisableUnrestrictedAccessToHighRiskPortsDocument } from '../../SC/ssmdocs/SC_EC2.19'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new DisableUnrestrictedAccessToHighRiskPortsDocument(stage, id, { ...props, controlId: 'EC2.19' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.2.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.2.ts new file mode 100644 index 00000000..e9dbdafd --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.2.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { RemoveVPCDefaultSecurityGroupRulesDocument } from '../../SC/ssmdocs/SC_EC2.2'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new RemoveVPCDefaultSecurityGroupRulesDocument(stage, id, { ...props, controlId: 'EC2.2' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.23.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.23.ts new file mode 100644 index 00000000..aaa93158 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.23.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { DisableTGWAutoAcceptSharedAttachmentsDocument } from '../../SC/ssmdocs/SC_EC2.23'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new DisableTGWAutoAcceptSharedAttachmentsDocument(stage, id, { ...props, controlId: 'EC2.23' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.4.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.4.ts new file mode 100644 index 00000000..6871c71a --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.4.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { TerminateEC2InstanceDocument } from '../../SC/ssmdocs/SC_EC2.4'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new TerminateEC2InstanceDocument(stage, id, { ...props, controlId: 'EC2.4' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.6.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.6.ts new file mode 100644 index 00000000..8338e1c0 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.6.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableVPCFlowLogsDocument } from '../../SC/ssmdocs/SC_EC2.6'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableVPCFlowLogsDocument(stage, id, { ...props, controlId: 'EC2.6' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.7.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.7.ts new file mode 100644 index 00000000..4c7182ab --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.7.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableEbsEncryptionByDefaultDocument } from '../../SC/ssmdocs/SC_EC2.7'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableEbsEncryptionByDefaultDocument(stage, id, { ...props, controlId: 'EC2.7' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.8.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.8.ts new file mode 100644 index 00000000..6750b5b3 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_EC2.8.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableIMDSV2OnInstance } from '../../SC/ssmdocs/SC_EC2.8'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableIMDSV2OnInstance(stage, id, { ...props, controlId: 'EC2.8' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_ECR.1.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_ECR.1.ts new file mode 100644 index 00000000..94ed513b --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_ECR.1.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnablePrivateRepositoryScanningDocument } from '../../SC/ssmdocs/SC_ECR.1'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnablePrivateRepositoryScanningDocument(stage, id, { ...props, controlId: 'ECR.1' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_GuardDuty.1.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_GuardDuty.1.ts new file mode 100644 index 00000000..0ffe883d --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_GuardDuty.1.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableGuardDutyDocument } from '../../SC/ssmdocs/SC_GuardDuty.1'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableGuardDutyDocument(stage, id, { ...props, controlId: 'GuardDuty.1' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_IAM.3.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_IAM.3.ts new file mode 100644 index 00000000..41e55ef2 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_IAM.3.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { RevokeUnrotatedKeysDocument } from '../../SC/ssmdocs/SC_IAM.3'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new RevokeUnrotatedKeysDocument(stage, id, { ...props, controlId: 'IAM.3' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_IAM.7.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_IAM.7.ts new file mode 100644 index 00000000..5213d12e --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_IAM.7.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { SetIAMPasswordPolicyDocument } from '../../SC/ssmdocs/SC_IAM.7'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new SetIAMPasswordPolicyDocument(stage, id, { ...props, controlId: 'IAM.7' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_IAM.8.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_IAM.8.ts new file mode 100644 index 00000000..b7307c41 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_IAM.8.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { RevokeUnusedIAMUserCredentialsDocument } from '../../SC/ssmdocs/SC_IAM.8'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new RevokeUnusedIAMUserCredentialsDocument(stage, id, { ...props, controlId: 'IAM.8' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_KMS.4.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_KMS.4.ts new file mode 100644 index 00000000..678c4efa --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_KMS.4.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableKeyRotationDocument } from '../../SC/ssmdocs/SC_KMS.4'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableKeyRotationDocument(stage, id, { ...props, controlId: 'KMS.4' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_Lambda.1.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_Lambda.1.ts new file mode 100644 index 00000000..d1f24d64 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_Lambda.1.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { RemoveLambdaPublicAccessDocument } from '../../SC/ssmdocs/SC_Lambda.1'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new RemoveLambdaPublicAccessDocument(stage, id, { ...props, controlId: 'Lambda.1' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.1.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.1.ts new file mode 100644 index 00000000..69273dcc --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.1.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { MakeRDSSnapshotPrivateDocument } from '../../SC/ssmdocs/SC_RDS.1'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new MakeRDSSnapshotPrivateDocument(stage, id, { ...props, controlId: 'RDS.1' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.13.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.13.ts new file mode 100644 index 00000000..56cb9a6c --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.13.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableMinorVersionUpgradeOnRDSDBInstanceDocument } from '../../SC/ssmdocs/SC_RDS.13'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableMinorVersionUpgradeOnRDSDBInstanceDocument(stage, id, { ...props, controlId: 'RDS.13' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.16.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.16.ts new file mode 100644 index 00000000..f13002ce --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.16.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableCopyTagsToSnapshotOnRDSClusterDocument } from '../../SC/ssmdocs/SC_RDS.16'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableCopyTagsToSnapshotOnRDSClusterDocument(stage, id, { ...props, controlId: 'RDS.16' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.2.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.2.ts new file mode 100644 index 00000000..2a0703ce --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.2.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { DisablePublicAccessToRDSInstanceDocument } from '../../SC/ssmdocs/SC_RDS.2'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new DisablePublicAccessToRDSInstanceDocument(stage, id, { ...props, controlId: 'RDS.2' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.4.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.4.ts new file mode 100644 index 00000000..b2eda5ee --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.4.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EncryptRDSSnapshotDocument } from '../../SC/ssmdocs/SC_RDS.4'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EncryptRDSSnapshotDocument(stage, id, { ...props, controlId: 'RDS.4' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.5.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.5.ts new file mode 100644 index 00000000..5920fca9 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.5.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableMultiAZOnRDSInstanceDocument } from '../../SC/ssmdocs/SC_RDS.5'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableMultiAZOnRDSInstanceDocument(stage, id, { ...props, controlId: 'RDS.5' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.6.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.6.ts new file mode 100644 index 00000000..cfd8abc7 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.6.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableEnhancedMonitoringOnRDSInstanceDocument } from '../../SC/ssmdocs/SC_RDS.6'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableEnhancedMonitoringOnRDSInstanceDocument(stage, id, { ...props, controlId: 'RDS.6' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.7.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.7.ts new file mode 100644 index 00000000..39213a56 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.7.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableRDSClusterDeletionProtectionDocument } from '../../SC/ssmdocs/SC_RDS.7'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableRDSClusterDeletionProtectionDocument(stage, id, { ...props, controlId: 'RDS.7' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.8.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.8.ts new file mode 100644 index 00000000..52b19718 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_RDS.8.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableRDSInstanceDeletionProtectionDocument } from '../../SC/ssmdocs/SC_RDS.8'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableRDSInstanceDeletionProtectionDocument(stage, id, { ...props, controlId: 'RDS.8' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_Redshift.1.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_Redshift.1.ts new file mode 100644 index 00000000..045e24e2 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_Redshift.1.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { DisablePublicAccessToRedshiftClusterDocument } from '../../SC/ssmdocs/SC_Redshift.1'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new DisablePublicAccessToRedshiftClusterDocument(stage, id, { ...props, controlId: 'Redshift.1' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_Redshift.3.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_Redshift.3.ts new file mode 100644 index 00000000..0fbc0362 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_Redshift.3.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableAutomaticSnapshotsOnRedshiftClusterDocument } from '../../SC/ssmdocs/SC_Redshift.3'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableAutomaticSnapshotsOnRedshiftClusterDocument(stage, id, { ...props, controlId: 'Redshift.3' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_Redshift.4.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_Redshift.4.ts new file mode 100644 index 00000000..5fa09e01 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_Redshift.4.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableRedshiftClusterAuditLoggingDocument } from '../../SC/ssmdocs/SC_Redshift.4'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableRedshiftClusterAuditLoggingDocument(stage, id, { ...props, controlId: 'Redshift.4' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_Redshift.6.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_Redshift.6.ts new file mode 100644 index 00000000..b88a2cff --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_Redshift.6.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableAutomaticVersionUpgradeOnRedshiftClusterDocument } from '../../SC/ssmdocs/SC_Redshift.6'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableAutomaticVersionUpgradeOnRedshiftClusterDocument(stage, id, { ...props, controlId: 'Redshift.6' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.1.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.1.ts new file mode 100644 index 00000000..a17dc1c5 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.1.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { ConfigureS3PublicAccessBlockDocument } from '../../SC/ssmdocs/SC_S3.1'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new ConfigureS3PublicAccessBlockDocument(stage, id, { ...props, controlId: 'S3.1' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.11.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.11.ts new file mode 100644 index 00000000..d4b4edd1 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.11.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableBucketEventNotificationsDocument } from '../../SC/ssmdocs/SC_S3.11'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableBucketEventNotificationsDocument(stage, id, { ...props, controlId: 'S3.11' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.13.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.13.ts new file mode 100644 index 00000000..93ef2c72 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.13.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { SetS3LifecyclePolicyDocument } from '../../SC/ssmdocs/SC_S3.13'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new SetS3LifecyclePolicyDocument(stage, id, { ...props, controlId: 'S3.13' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.2.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.2.ts new file mode 100644 index 00000000..668ee501 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.2.ts @@ -0,0 +1,14 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { ConfigureS3BucketPublicAccessBlockDocument } from '../../SC/ssmdocs/SC_S3.2'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new ConfigureS3BucketPublicAccessBlockDocument(stage, id, { + ...props, + controlId: 'S3.2', + otherControlIds: ['S3.3', 'S3.8'], + }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.4.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.4.ts new file mode 100644 index 00000000..dfbad3f2 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.4.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableDefaultEncryptionS3Document } from '../../SC/ssmdocs/SC_S3.4'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableDefaultEncryptionS3Document(stage, id, { ...props, controlId: 'S3.4' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.5.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.5.ts new file mode 100644 index 00000000..a5b6f22e --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.5.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { SetSSLBucketPolicyDocument } from '../../SC/ssmdocs/SC_S3.5'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new SetSSLBucketPolicyDocument(stage, id, { ...props, controlId: 'S3.5' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.6.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.6.ts new file mode 100644 index 00000000..3f3d7a55 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.6.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { S3BlockDenylistDocument } from '../../SC/ssmdocs/SC_S3.6'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new S3BlockDenylistDocument(stage, id, { ...props, controlId: 'S3.6' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.9.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.9.ts new file mode 100644 index 00000000..d23fdccf --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_S3.9.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { ConfigureS3BucketLoggingDocument } from '../../SC/ssmdocs/SC_CloudTrail.7'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new ConfigureS3BucketLoggingDocument(stage, id, { ...props, controlId: 'S3.9' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_SNS.1.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_SNS.1.ts new file mode 100644 index 00000000..c6dcc3ae --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_SNS.1.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableEncryptionForSNSTopicDocument } from '../../SC/ssmdocs/SC_SNS.1'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableEncryptionForSNSTopicDocument(stage, id, { ...props, controlId: 'SNS.1' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_SNS.2.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_SNS.2.ts new file mode 100644 index 00000000..b02a0ac3 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_SNS.2.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableDeliveryLoggingForSNSTopicDocument } from '../../SC/ssmdocs/SC_SNS.2'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableDeliveryLoggingForSNSTopicDocument(stage, id, { ...props, controlId: 'SNS.2' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_SQS.1.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_SQS.1.ts new file mode 100644 index 00000000..311491ac --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_SQS.1.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableEncryptionForSQSQueueDocument } from '../../SC/ssmdocs/SC_SQS.1'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableEncryptionForSQSQueueDocument(stage, id, { ...props, controlId: 'SQS.1' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_SSM.4.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_SSM.4.ts new file mode 100644 index 00000000..71fd8be0 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_SSM.4.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { DisablePublicSSMDocument } from '../../SC/ssmdocs/SC_SSM.4'; + +export function createControlRunbook(scope: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new DisablePublicSSMDocument(scope, id, { ...props, controlId: 'SSM.4' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_SecretsManager.1.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_SecretsManager.1.ts new file mode 100644 index 00000000..cb3212ee --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_SecretsManager.1.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { EnableAutoSecretRotationDocument } from '../../SC/ssmdocs/SC_SecretsManager.1'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableAutoSecretRotationDocument(stage, id, { ...props, controlId: 'SecretsManager.1' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_SecretsManager.3.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_SecretsManager.3.ts new file mode 100644 index 00000000..49316a47 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_SecretsManager.3.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { RemoveUnusedSecretDocument } from '../../SC/ssmdocs/SC_SecretsManager.3'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new RemoveUnusedSecretDocument(stage, id, { ...props, controlId: 'SecretsManager.3' }); +} diff --git a/source/playbooks/NIST80053/ssmdocs/NIST80053_SecretsManager.4.ts b/source/playbooks/NIST80053/ssmdocs/NIST80053_SecretsManager.4.ts new file mode 100644 index 00000000..b203b965 --- /dev/null +++ b/source/playbooks/NIST80053/ssmdocs/NIST80053_SecretsManager.4.ts @@ -0,0 +1,10 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../../SC/lib/control_runbooks-construct'; +import { ControlRunbookDocument } from '../../SC/ssmdocs/control_runbook'; +import { UpdateSecretRotationPeriodDocument } from '../../SC/ssmdocs/SC_SecretsManager.4'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new UpdateSecretRotationPeriodDocument(stage, id, { ...props, controlId: 'SecretsManager.4' }); +} diff --git a/source/playbooks/NIST80053/support.txt b/source/playbooks/NIST80053/support.txt new file mode 100644 index 00000000..54b77f55 --- /dev/null +++ b/source/playbooks/NIST80053/support.txt @@ -0,0 +1,3 @@ +NIST SP 800-53 Remediation Playbook - Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the 'License'); You may not use this product template except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0. + +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
diff --git a/source/playbooks/NIST80053/test/__snapshots__/nist_stack.test.ts.snap b/source/playbooks/NIST80053/test/__snapshots__/nist_stack.test.ts.snap new file mode 100644 index 00000000..82c095b6 --- /dev/null +++ b/source/playbooks/NIST80053/test/__snapshots__/nist_stack.test.ts.snap @@ -0,0 +1,25906 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Admin Stack - NIST 1`] = ` +{ + "Description": "test;", + "Mappings": { + "SourceCode": { + "General": { + "KeyPrefix": "automated-security-response-on-aws/v2.1.0", + "S3Bucket": "asrbukkit", + }, + }, + }, + "Parameters": { + "NIST80053R5500Example1AutoTrigger": { + "AllowedValues": [ + "ENABLED", + "DISABLED", + ], + "Default": "DISABLED", + "Description": "This will fully enable automated remediation for NIST80053R5 5.0.0 Example.1", + "Type": "String", + }, + "NIST80053R5500Example3AutoTrigger": { + "AllowedValues": [ + "ENABLED", + "DISABLED", + ], + "Default": "DISABLED", + "Description": "This will fully enable automated remediation for NIST80053R5 5.0.0 Example.3", + "Type": "String", + }, + "NIST80053R5500Example5AutoTrigger": { + "AllowedValues": [ + "ENABLED", + "DISABLED", + ], + "Default": "DISABLED", + "Description": "This will fully enable automated remediation for NIST80053R5 5.0.0 Example.5", + "Type": "String", + }, + "SsmParameterValueSolutionsSO0111OrchestratorArnC96584B6F00A464EAD1953AFF4B05118Parameter": { + "Default": "/Solutions/SO0111/OrchestratorArn", + "Type": "AWS::SSM::Parameter::Value", + }, + }, + "Resources": { + "NIST80053R5Example1AutoEventRule95D936D5": { + "Properties": { + "Description": "Remediate NIST80053R5 5.0.0 Example.1 automatic remediation trigger event rule.", + "EventPattern": { + "detail": { + "findings": { + "Compliance": { + "Status": [ + "FAILED", + "WARNING", + ], + }, + "GeneratorId": [ + "nist-800-53/v/5.0.0/Example.1", + ], + "RecordState": [ + "ACTIVE", + ], + "Workflow": { + "Status": [ + "NEW", + ], + }, + }, + }, + "detail-type": [ + "Security Hub 
Findings - Imported", + ], + "source": [ + "aws.securityhub", + ], + }, + "Name": "NIST80053R5_5.0.0_Example.1_AutoTrigger", + "State": { + "Ref": "NIST80053R5500Example1AutoTrigger", + }, + "Targets": [ + { + "Arn": { + "Ref": "SsmParameterValueSolutionsSO0111OrchestratorArnC96584B6F00A464EAD1953AFF4B05118Parameter", + }, + "Id": "Target0", + "RoleArn": { + "Fn::GetAtt": [ + "NIST80053R5Example1EventsRuleRoleE952DC2A", + "Arn", + ], + }, + }, + ], + }, + "Type": "AWS::Events::Rule", + }, + "NIST80053R5Example1EventsRuleRoleDefaultPolicyB5007A6E": { + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": "states:StartExecution", + "Effect": "Allow", + "Resource": { + "Ref": "SsmParameterValueSolutionsSO0111OrchestratorArnC96584B6F00A464EAD1953AFF4B05118Parameter", + }, + }, + ], + "Version": "2012-10-17", + }, + "PolicyName": "NIST80053R5Example1EventsRuleRoleDefaultPolicyB5007A6E", + "Roles": [ + { + "Ref": "NIST80053R5Example1EventsRuleRoleE952DC2A", + }, + ], + }, + "Type": "AWS::IAM::Policy", + }, + "NIST80053R5Example1EventsRuleRoleE952DC2A": { + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "events.amazonaws.com", + }, + }, + ], + "Version": "2012-10-17", + }, + }, + "Type": "AWS::IAM::Role", + }, + "NIST80053R5Example3AutoEventRuleE2D204EE": { + "Properties": { + "Description": "Remediate NIST80053R5 5.0.0 Example.3 automatic remediation trigger event rule.", + "EventPattern": { + "detail": { + "findings": { + "Compliance": { + "Status": [ + "FAILED", + "WARNING", + ], + }, + "GeneratorId": [ + "nist-800-53/v/5.0.0/Example.3", + ], + "RecordState": [ + "ACTIVE", + ], + "Workflow": { + "Status": [ + "NEW", + ], + }, + }, + }, + "detail-type": [ + "Security Hub Findings - Imported", + ], + "source": [ + "aws.securityhub", + ], + }, + "Name": "NIST80053R5_5.0.0_Example.3_AutoTrigger", + "State": { + "Ref": "NIST80053R5500Example3AutoTrigger", 
+ }, + "Targets": [ + { + "Arn": { + "Ref": "SsmParameterValueSolutionsSO0111OrchestratorArnC96584B6F00A464EAD1953AFF4B05118Parameter", + }, + "Id": "Target0", + "RoleArn": { + "Fn::GetAtt": [ + "NIST80053R5Example3EventsRuleRoleC1DD4E0D", + "Arn", + ], + }, + }, + ], + }, + "Type": "AWS::Events::Rule", + }, + "NIST80053R5Example3EventsRuleRoleC1DD4E0D": { + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "events.amazonaws.com", + }, + }, + ], + "Version": "2012-10-17", + }, + }, + "Type": "AWS::IAM::Role", + }, + "NIST80053R5Example3EventsRuleRoleDefaultPolicy07768777": { + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": "states:StartExecution", + "Effect": "Allow", + "Resource": { + "Ref": "SsmParameterValueSolutionsSO0111OrchestratorArnC96584B6F00A464EAD1953AFF4B05118Parameter", + }, + }, + ], + "Version": "2012-10-17", + }, + "PolicyName": "NIST80053R5Example3EventsRuleRoleDefaultPolicy07768777", + "Roles": [ + { + "Ref": "NIST80053R5Example3EventsRuleRoleC1DD4E0D", + }, + ], + }, + "Type": "AWS::IAM::Policy", + }, + "NIST80053R5Example5AutoEventRuleF54D616E": { + "Properties": { + "Description": "Remediate NIST80053R5 5.0.0 Example.5 automatic remediation trigger event rule.", + "EventPattern": { + "detail": { + "findings": { + "Compliance": { + "Status": [ + "FAILED", + "WARNING", + ], + }, + "GeneratorId": [ + "nist-800-53/v/5.0.0/Example.5", + ], + "RecordState": [ + "ACTIVE", + ], + "Workflow": { + "Status": [ + "NEW", + ], + }, + }, + }, + "detail-type": [ + "Security Hub Findings - Imported", + ], + "source": [ + "aws.securityhub", + ], + }, + "Name": "NIST80053R5_5.0.0_Example.5_AutoTrigger", + "State": { + "Ref": "NIST80053R5500Example5AutoTrigger", + }, + "Targets": [ + { + "Arn": { + "Ref": "SsmParameterValueSolutionsSO0111OrchestratorArnC96584B6F00A464EAD1953AFF4B05118Parameter", + }, + "Id": "Target0", + "RoleArn": { + 
"Fn::GetAtt": [ + "NIST80053R5Example5EventsRuleRoleC18202B4", + "Arn", + ], + }, + }, + ], + }, + "Type": "AWS::Events::Rule", + }, + "NIST80053R5Example5EventsRuleRoleC18202B4": { + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "events.amazonaws.com", + }, + }, + ], + "Version": "2012-10-17", + }, + }, + "Type": "AWS::IAM::Role", + }, + "NIST80053R5Example5EventsRuleRoleDefaultPolicyE6EE993F": { + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": "states:StartExecution", + "Effect": "Allow", + "Resource": { + "Ref": "SsmParameterValueSolutionsSO0111OrchestratorArnC96584B6F00A464EAD1953AFF4B05118Parameter", + }, + }, + ], + "Version": "2012-10-17", + }, + "PolicyName": "NIST80053R5Example5EventsRuleRoleDefaultPolicyE6EE993F", + "Roles": [ + { + "Ref": "NIST80053R5Example5EventsRuleRoleC18202B4", + }, + ], + }, + "Type": "AWS::IAM::Policy", + }, + "NIST80053R5ShortNameA0C833E9": { + "Properties": { + "Description": "Provides a short (1-12) character abbreviation for the standard.", + "Name": "/Solutions/SO0111/nist-800-53/5.0.0/shortname", + "Type": "String", + "Value": "NIST80053R5", + }, + "Type": "AWS::SSM::Parameter", + }, + "StandardVersionCB2C6951": { + "Properties": { + "Description": "This parameter controls whether the SHARR step function will process findings for this version of the standard.", + "Name": "/Solutions/SO0111/nist-800-53/5.0.0/status", + "Type": "String", + "Value": "enabled", + }, + "Type": "AWS::SSM::Parameter", + }, + }, +} +`; + +exports[`Member Stack - NIST 1`] = ` +{ + "Conditions": { + "ControlRunbooksEnableAutoScaling1ConditionD5DF4981": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableAutoScaling1851AF8B0", + }, + "Available", + ], + }, + "ControlRunbooksEnableCloudFormation1ConditionD8D32097": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableCloudFormation1B75725BB", + }, + "Available", + ], + }, 
+ "ControlRunbooksEnableCloudFront12Condition59835E00": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableCloudFront12B883E8E5", + }, + "Available", + ], + }, + "ControlRunbooksEnableCloudFront1ConditionD78B5553": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableCloudFront1A6026987", + }, + "Available", + ], + }, + "ControlRunbooksEnableCloudTrail1ConditionB7EBAA86": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableCloudTrail1F0F927F7", + }, + "Available", + ], + }, + "ControlRunbooksEnableCloudTrail2ConditionC182A10F": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableCloudTrail28CC248AB", + }, + "Available", + ], + }, + "ControlRunbooksEnableCloudTrail4Condition587734A2": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableCloudTrail4040C6EAB", + }, + "Available", + ], + }, + "ControlRunbooksEnableCloudTrail5Condition17B6B536": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableCloudTrail52CBFD019", + }, + "Available", + ], + }, + "ControlRunbooksEnableCodeBuild2ConditionB01F473D": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableCodeBuild26FB6E539", + }, + "Available", + ], + }, + "ControlRunbooksEnableCodeBuild5Condition5FF93A0A": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableCodeBuild5144FBB6F", + }, + "Available", + ], + }, + "ControlRunbooksEnableConfig1Condition8CEB8627": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableConfig19F6E6FE3", + }, + "Available", + ], + }, + "ControlRunbooksEnableEC213Condition567EA275": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableEC21349FA0A79", + }, + "Available", + ], + }, + "ControlRunbooksEnableEC215Condition52A7DE4B": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableEC215DA64A549", + }, + "Available", + ], + }, + "ControlRunbooksEnableEC218Condition903B1C90": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableEC21822A124F1", + }, + "Available", + ], + }, + "ControlRunbooksEnableEC219Condition2421DE99": { + "Fn::Equals": [ + { + "Ref": 
"ControlRunbooksEnableEC21919C72DDA", + }, + "Available", + ], + }, + "ControlRunbooksEnableEC21ConditionD4F1277B": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableEC21395C7891", + }, + "Available", + ], + }, + "ControlRunbooksEnableEC223Condition795CB580": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableEC223E02B5464", + }, + "Available", + ], + }, + "ControlRunbooksEnableEC22ConditionB9E0D42E": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableEC22F9B66A60", + }, + "Available", + ], + }, + "ControlRunbooksEnableEC24Condition72408A1B": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableEC2448A9BAD2", + }, + "Available", + ], + }, + "ControlRunbooksEnableEC26ConditionF1F880B0": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableEC265685AB83", + }, + "Available", + ], + }, + "ControlRunbooksEnableEC27ConditionC77CF056": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableEC27108F6303", + }, + "Available", + ], + }, + "ControlRunbooksEnableEC28Condition4C4640B8": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableEC287AE93AB2", + }, + "Available", + ], + }, + "ControlRunbooksEnableECR1Condition70BCAF70": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableECR1CC254C91", + }, + "Available", + ], + }, + "ControlRunbooksEnableGuardDuty1Condition97849740": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableGuardDuty1139BC2DA", + }, + "Available", + ], + }, + "ControlRunbooksEnableIAM3Condition3AA0E892": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableIAM35D05519D", + }, + "Available", + ], + }, + "ControlRunbooksEnableIAM7ConditionDF8E776B": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableIAM766CB4E0A", + }, + "Available", + ], + }, + "ControlRunbooksEnableIAM8Condition9CA5CB4B": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableIAM834577BE3", + }, + "Available", + ], + }, + "ControlRunbooksEnableKMS4Condition710C0C5C": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableKMS415F4485B", + }, + "Available", + 
], + }, + "ControlRunbooksEnableLambda1Condition077CECAF": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableLambda11AAE99FF", + }, + "Available", + ], + }, + "ControlRunbooksEnableRDS13Condition0E8A44B3": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableRDS13F10477DD", + }, + "Available", + ], + }, + "ControlRunbooksEnableRDS16ConditionCB5C3E8F": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableRDS16F428962C", + }, + "Available", + ], + }, + "ControlRunbooksEnableRDS1ConditionFAE5B7EA": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableRDS18380A289", + }, + "Available", + ], + }, + "ControlRunbooksEnableRDS2Condition4FD00FE6": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableRDS2004A67EB", + }, + "Available", + ], + }, + "ControlRunbooksEnableRDS4Condition2E89346E": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableRDS4E2A98B6D", + }, + "Available", + ], + }, + "ControlRunbooksEnableRDS5ConditionEC2574C3": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableRDS59E051E8F", + }, + "Available", + ], + }, + "ControlRunbooksEnableRDS6Condition4A60A39B": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableRDS6C46B2207", + }, + "Available", + ], + }, + "ControlRunbooksEnableRDS7ConditionE53509B0": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableRDS7CEA605AE", + }, + "Available", + ], + }, + "ControlRunbooksEnableRDS8Condition8F460AB5": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableRDS8FBE41D2B", + }, + "Available", + ], + }, + "ControlRunbooksEnableRedshift1Condition3449D560": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableRedshift1E5BFAC24", + }, + "Available", + ], + }, + "ControlRunbooksEnableRedshift3ConditionC65BAEF6": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableRedshift39346F065", + }, + "Available", + ], + }, + "ControlRunbooksEnableRedshift4Condition2377F6B5": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableRedshift40FBDF0D8", + }, + "Available", + ], + }, + 
"ControlRunbooksEnableRedshift6Condition5A51FC97": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableRedshift648AC3FBB", + }, + "Available", + ], + }, + "ControlRunbooksEnableS311Condition6AA79443": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableS3118EE66AFD", + }, + "Available", + ], + }, + "ControlRunbooksEnableS313ConditionA95162A4": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableS3134CFE501B", + }, + "Available", + ], + }, + "ControlRunbooksEnableS31Condition25C33B3F": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableS3116A23B93", + }, + "Available", + ], + }, + "ControlRunbooksEnableS32ConditionD6F8CCE9": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableS325CF1F81C", + }, + "Available", + ], + }, + "ControlRunbooksEnableS34ConditionC23F6623": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableS348078AE21", + }, + "Available", + ], + }, + "ControlRunbooksEnableS35ConditionD5E024B6": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableS35B965D7F6", + }, + "Available", + ], + }, + "ControlRunbooksEnableS36ConditionD22273E2": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableS36B92F84BB", + }, + "Available", + ], + }, + "ControlRunbooksEnableS39Condition7705D6AD": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableS39BC8190C9", + }, + "Available", + ], + }, + "ControlRunbooksEnableSNS1Condition7720D1CC": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableSNS1B5923950", + }, + "Available", + ], + }, + "ControlRunbooksEnableSNS2Condition69621468": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableSNS232380485", + }, + "Available", + ], + }, + "ControlRunbooksEnableSQS1Condition3065B4F2": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableSQS1A400C913", + }, + "Available", + ], + }, + "ControlRunbooksEnableSSM4ConditionD47FCFB5": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableSSM47E37D561", + }, + "Available", + ], + }, + "ControlRunbooksEnableSecretsManager1ConditionCE635AAF": { + "Fn::Equals": [ 
+ { + "Ref": "ControlRunbooksEnableSecretsManager10CFF911B", + }, + "Available", + ], + }, + "ControlRunbooksEnableSecretsManager3Condition04E1FFBB": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableSecretsManager3EFC137EE", + }, + "Available", + ], + }, + "ControlRunbooksEnableSecretsManager4ConditionCE71F44A": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableSecretsManager4B15D8607", + }, + "Available", + ], + }, + }, + "Description": "test;", + "Parameters": { + "ControlRunbooksEnableAutoScaling1851AF8B0": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control AutoScaling.1 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableCloudFormation1B75725BB": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control CloudFormation.1 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableCloudFront12B883E8E5": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control CloudFront.12 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableCloudFront1A6026987": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control CloudFront.1 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableCloudTrail1F0F927F7": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control CloudTrail.1 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableCloudTrail28CC248AB": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control CloudTrail.2 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableCloudTrail4040C6EAB": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control CloudTrail.4 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableCloudTrail52CBFD019": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control CloudTrail.5 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableCodeBuild26FB6E539": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control CodeBuild.2 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableCodeBuild5144FBB6F": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control CodeBuild.5 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableConfig19F6E6FE3": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control Config.1 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableEC21349FA0A79": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control EC2.13 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableEC21395C7891": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control EC2.1 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableEC215DA64A549": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control EC2.15 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableEC21822A124F1": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control EC2.18 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableEC21919C72DDA": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control EC2.19 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableEC223E02B5464": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control EC2.23 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableEC22F9B66A60": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control EC2.2 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableEC2448A9BAD2": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control EC2.4 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableEC265685AB83": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control EC2.6 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableEC27108F6303": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control EC2.7 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableEC287AE93AB2": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control EC2.8 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableECR1CC254C91": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control ECR.1 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableGuardDuty1139BC2DA": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control GuardDuty.1 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableIAM35D05519D": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control IAM.3 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableIAM766CB4E0A": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control IAM.7 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableIAM834577BE3": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control IAM.8 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableKMS415F4485B": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control KMS.4 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableLambda11AAE99FF": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control Lambda.1 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableRDS13F10477DD": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control RDS.13 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableRDS16F428962C": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control RDS.16 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableRDS18380A289": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control RDS.1 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableRDS2004A67EB": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control RDS.2 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableRDS4E2A98B6D": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control RDS.4 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableRDS59E051E8F": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control RDS.5 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableRDS6C46B2207": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control RDS.6 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableRDS7CEA605AE": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control RDS.7 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableRDS8FBE41D2B": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control RDS.8 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableRedshift1E5BFAC24": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control Redshift.1 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableRedshift39346F065": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control Redshift.3 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableRedshift40FBDF0D8": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control Redshift.4 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableRedshift648AC3FBB": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control Redshift.6 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableS3116A23B93": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control S3.1 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableS3118EE66AFD": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control S3.11 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableS3134CFE501B": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control S3.13 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableS325CF1F81C": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control S3.2 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableS348078AE21": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control S3.4 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableS35B965D7F6": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control S3.5 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableS36B92F84BB": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control S3.6 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableS39BC8190C9": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control S3.9 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableSNS1B5923950": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control SNS.1 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableSNS232380485": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control SNS.2 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableSQS1A400C913": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control SQS.1 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableSSM47E37D561": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control SSM.4 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableSecretsManager10CFF911B": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control SecretsManager.1 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableSecretsManager3EFC137EE": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control SecretsManager.3 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableSecretsManager4B15D8607": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for nist-800-53 version 5.0.0 Control SecretsManager.4 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "SecHubAdminAccount": { + "AllowedPattern": "^\\d{12}$", + "Description": "Admin account number", + "Type": "String", + }, + "WaitProviderServiceToken": { + "Type": "String", + }, + }, + "Resources": { + "ControlRunbooksAutoScaling1BA109277": { + "Condition": "ControlRunbooksEnableAutoScaling1ConditionD5DF4981", + "DependsOn": [ + "CreateWait0", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_AutoScaling.1 + +## What does this document do? +This document enables ELB healthcheck on a given AutoScaling Group using the [UpdateAutoScalingGroup] API. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* HealthCheckGracePeriod: (Optional) Health check grace period when ELB health check is Enabled +Default: 30 seconds +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP AutoScaling.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-autoscaling-1) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "AutoScaling.1", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):autoscaling:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:autoScalingGroup:(?:[0-9a-fA-F]{8}-(?:[0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12}):autoScalingGroupName/(.{1,255})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + 
user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if 
not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, 
+ ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": 
finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "AutoScalingGroupName", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableAutoScalingGroupELBHealthCheck", + "RuntimeParameters": { + "AutoScalingGroupName": "{{ ParseInput.AutoScalingGroupName }}", + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + 
"action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "ASG health check type updated to ELB", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_AutoScaling.1", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the AutoScaling.1 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableAutoScalingGroupELBHealthCheck", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_AutoScaling.1", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksCloudFormation12CB945DB": { + "Condition": "ControlRunbooksEnableCloudFormation1ConditionD8D32097", + "DependsOn": [ + "CreateWait0", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_CloudFormation.1 + +## What does this document do? +This document configures an SNS topic for notifications from a CloudFormation stack by calling another document. 
+ +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 CloudFormation.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudformation-1) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "CloudFormation.1", + ], + "parse_id_pattern": "^(arn:(?:aws|aws-us-gov|aws-cn):cloudformation:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:stack/[a-zA-Z][a-zA-Z0-9-]{0,127}/[a-fA-F0-9]{8}-(?:[a-fA-F0-9]{4}-){3}[a-fA-F0-9]{12})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + 
return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version 
= match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not 
re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + 
event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "StackArn", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-ConfigureSNSTopicForStack", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "StackArn": "{{ ParseInput.StackArn }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + 
"{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Configured SNS topic for notifications", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_CloudFormation.1", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the CloudFormation.1 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-ConfigureSNSTopicForStack", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_CloudFormation.1", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksCloudFront116F66FF8": { + "Condition": "ControlRunbooksEnableCloudFront1ConditionD78B5553", + "DependsOn": [ + "CreateWait0", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_CloudFront.1 + +## What does this document do? +This document configures a default root object to be returned when visiting a CloudFront distribution. 
+ +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 CloudFront.1](https://docs.aws.amazon.com/securityhub/latest/userguide/cloudfront-controls.html#cloudfront-1)", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "CloudFront.1", + ], + "parse_id_pattern": "^(arn:(?:aws|aws-us-gov|aws-cn):cloudfront::\\d{12}:distribution\\/([A-Z0-9]+))$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, 
+ user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if 
not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, 
+ ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": 
finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "CloudFrontDistribution", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableCloudFrontDefaultRootObject", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "CloudFrontDistribution": "{{ ParseInput.CloudFrontDistribution }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Configured default root object for CloudFront distribution", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_CloudFront.1", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", 
+ }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the CloudFront.1 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableCloudFrontDefaultRootObject", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_CloudFront.1", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksCloudFront1283E53E96": { + "Condition": "ControlRunbooksEnableCloudFront12Condition59835E00", + "DependsOn": [ + "CreateWait0", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-NIST_800_53_CloudFront.12 +## What does this document do? +This document enables sets the origin domain to a non-existent value to prevent a potential malicious takeover. +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+## Output Parameters +* Remediation.Output - Output from the remediation + +## Documentation Links +* [NIST CloudFront.1](https://docs.aws.amazon.com/securityhub/latest/userguide/cloudfront-controls.html#cloudfront-12) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "CloudFront.12", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):cloudfront::[0-9]{12}:distribution\\/([A-Z0-9]*)$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = 
configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def 
_get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + 
self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": 
finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "DistributionId", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-SetCloudFrontOriginDomain", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "DistributionId": "{{ ParseInput.DistributionId }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Set CloudFront origin domain to safe value.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_CloudFront.12", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + 
"allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the CloudFront.12 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-SetCloudFrontOriginDomain", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_CloudFront.12", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksCloudTrail1B15F1A13": { + "Condition": "ControlRunbooksEnableCloudTrail1ConditionB7EBAA86", + "DependsOn": [ + "CreateWait0", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_CloudTrail.1 +## What does this document do? +Creates a multi-region trail with KMS encryption and enables CloudTrail +Note: this remediation will create a NEW trail. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Documentation Links +* [AWS FSBP CloudTrail.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-1) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "CloudTrail.1", + "CloudTrail.3", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, 
len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) 
+ if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. 
+ self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": 
finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-CreateCloudTrailMultiRegionTrail", + "RuntimeParameters": { + "AWSPartition": "{{ global:AWS_PARTITION }}", + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Multi-region, encrypted AWS CloudTrail successfully created", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_CloudTrail.1", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the CloudTrail.1 finding", + 
"type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-CreateCloudTrailMultiRegionTrail", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_CloudTrail.1", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksCloudTrail2979D0B5D": { + "Condition": "ControlRunbooksEnableCloudTrail2ConditionC182A10F", + "DependsOn": [ + "CreateWait1", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_CloudTrail.2 +## What does this document do? +This document enables SSE KMS encryption for log files using the ASR remediation KMS CMK +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +## Output Parameters +* Remediation.Output - Output from the remediation + +## Documentation Links +* [AWS FSBP CloudTrail.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-2) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "CloudTrail.2", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, 
len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) 
+ if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. 
+ self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": 
finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "TrailArn", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableCloudTrailEncryption", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "TrailArn": "{{ ParseInput.TrailArn }}", + "TrailRegion": "{{ ParseInput.RemediationRegion }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Encryption enabled on CloudTrail", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_CloudTrail.2", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + 
}, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the CloudTrail.2 finding", + "type": "StringMap", + }, + "KMSKeyArn": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:(?:(?:alias\\/[A-Za-z0-9/_-])|(?:key\\/(?:[0-9a-fA-F]{8}-(?:[0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12})))$", + "default": "{{ssm:/Solutions/SO0111/CMK_REMEDIATION_ARN}}", + "type": "String", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableCloudTrailEncryption", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_CloudTrail.2", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksCloudTrail4057F669F": { + "Condition": "ControlRunbooksEnableCloudTrail4Condition587734A2", + "DependsOn": [ + "CreateWait1", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_CloudTrail.4 + +## What does this document do? +This document enables CloudTrail log file validation. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 CloudTrail.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-4) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "CloudTrail.4", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):cloudtrail:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:trail\\/([A-Za-z0-9._-]{3,128})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = 
connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: 
{self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is 
invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # 
Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "TrailName", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableCloudTrailLogFileValidation", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "TrailName": "{{ ParseInput.TrailName }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + 
"ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Enabled CloudTrail log file validation.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_CloudTrail.4", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the CloudTrail.4 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableCloudTrailLogFileValidation", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_CloudTrail.4", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksCloudTrail54F5ED8E4": { + "Condition": "ControlRunbooksEnableCloudTrail5Condition17B6B536", + "DependsOn": [ + "CreateWait1", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_CloudTrail.5 + +## What does this document do? +This document configures CloudTrail to log to CloudWatch Logs. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output - Remediation results + +## Documentation Links +* [AWS FSBP v1.0.0 CloudTrail.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-5) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "CloudTrail.5", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):cloudtrail:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:trail\\/([A-Za-z0-9._-]{3,128})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + 
configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is 
invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + 
f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": 
finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "TrailName", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableCloudTrailToCloudWatchLogging", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "CloudWatchLogsRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/SO0111-CloudTrailToCloudWatchLogs", + "LogGroupName": "CloudTrail/{{ ParseInput.TrailName }}", + "TrailName": "{{ ParseInput.TrailName }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ 
ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Configured CloudTrail logging to CloudWatch Logs Group CloudTrail/{{ ParseInput.TrailName }}", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_CloudTrail.5", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the CloudTrail.5 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableCloudTrailToCloudWatchLogging", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_CloudTrail.5", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksCodeBuild2A2751671": { + "Condition": "ControlRunbooksEnableCodeBuild2ConditionB01F473D", + "DependsOn": [ + "CreateWait1", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_CodeBuild.2 + +## What does this document do? 
+This document removes CodeBuild project environment variables containing clear text credentials and replaces them with Amazon EC2 Systems Manager Parameters. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 CodeBuild.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-codebuild-2) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "CodeBuild.2", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):codebuild:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:project\\/([A-Za-z0-9][A-Za-z0-9\\-_]{1,254})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, 
len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) 
+ if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. 
+ self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": 
finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "ProjectName", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-ReplaceCodeBuildClearTextCredentials", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "ProjectName": "{{ ParseInput.ProjectName }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Replaced clear text credentials with SSM parameters.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_CodeBuild.2", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": 
true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the CodeBuild.2 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-ReplaceCodeBuildClearTextCredentials", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_CodeBuild.2", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksCodeBuild509682556": { + "Condition": "ControlRunbooksEnableCodeBuild5Condition5FF93A0A", + "DependsOn": [ + "CreateWait1", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_CodeBuild.5 + +## What does this document do? +This document removes CodeBuild project privileged mode to remove a build project's Docker container access to all devices. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 CodeBuild.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-codebuild-5) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "CodeBuild.5", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):codebuild:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:project\\/([A-Za-z0-9][A-Za-z0-9\\-_]{1,254})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = 
connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: 
{self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is 
invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # 
Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "ProjectName", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-RemoveCodeBuildPrivilegedMode", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "ProjectName": "{{ ParseInput.ProjectName }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + 
"ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Removed CodeBuild privileged status.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_CodeBuild.5", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the CodeBuild.5 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-RemoveCodeBuildPrivilegedMode", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_CodeBuild.5", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksConfig1512B566F": { + "Condition": "ControlRunbooksEnableConfig1Condition8CEB8627", + "DependsOn": [ + "CreateWait2", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_Config.1 +## What does this document do? +Enables AWS Config: +* Turns on recording for all resources. +* Creates an encrypted bucket for Config logging. +* Creates a logging bucket for access logs for the config bucket +* Creates an SNS topic for Config notifications +* Creates a service-linked role + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Documentation Links +* [AWS FSBP Config.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-config-1) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "Config.1", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR 
getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in 
self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation 
runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": 
finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableAWSConfig", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "KMSKeyArn": "{{ KMSKeyArn }}", + "SNSTopicName": "SO0111-SHARR-AWSConfigNotification", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "AWS Config enabled", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_Config.1", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + 
"description": "The input from the Orchestrator Step function for the Config.1 finding", + "type": "StringMap", + }, + "KMSKeyArn": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:(?:(?:alias\\/[A-Za-z0-9/-_])|(?:key\\/(?:[0-9a-fA-F]{8}-(?:[0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12})))$", + "default": "{{ssm:/Solutions/SO0111/CMK_REMEDIATION_ARN}}", + "description": "The ARN of the KMS key created by ASR for remediations", + "type": "String", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableAWSConfig", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_Config.1", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC213D7C9C1EB": { + "Condition": "ControlRunbooksEnableEC213Condition567EA275", + "DependsOn": [ + "CreateWait3", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-PCI_3.2.1_EC2.5 + +## What does this document do? +Removes public access to remove server administrative ports from an EC2 Security Group + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output - Output of AWS-DisablePublicAccessForSecurityGroup runbook. 
+ +## Documentation Links +* [PCI v3.2.1 EC2.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-pci-controls.html#pcidss-ec2-5) +* [CIS v1.2.0 4.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-4.1) +* [CIS v1.2.0 4.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-4.2) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.13", + "EC2.14", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:security-group\\/(sg-[a-f\\d]{8,17})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def 
get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = 
match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + 
r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 
1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "GroupId", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "AWS-DisablePublicAccessForSecurityGroup", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "GroupId": "{{ ParseInput.GroupId }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ 
ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Disabled public access to administrative ports in the security group {{ ParseInput.GroupId }}.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_EC2.13", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the EC2.13 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-DisablePublicAccessForSecurityGroup", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_EC2.13", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC214D3BB404": { + "Condition": "ControlRunbooksEnableEC21ConditionD4F1277B", + "DependsOn": [ + "CreateWait2", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.1 +## What does this document do? +This document changes all public EC2 snapshots to private + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Documentation Links +* [AWS FSBP EC2.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-1) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.1", + ], + "parse_id_pattern": "", + "resource_index": 2, + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + 
exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in 
self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation 
runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": 
finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "TestMode", + "Selector": "$.Payload.testmode", + "Type": "Boolean", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-MakeEBSSnapshotsPrivate", + "RuntimeParameters": { + "AccountId": "{{ ParseInput.RemediationAccount }}", + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "TestMode": "{{ ParseInput.TestMode }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "EBS Snapshot modified to private", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_EC2.1", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": 
"^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the EC2.1 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-MakeEBSSnapshotsPrivate", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_EC2.1", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC2153B43E7A8": { + "Condition": "ControlRunbooksEnableEC215Condition52A7DE4B", + "DependsOn": [ + "CreateWait3", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.15 + +## What does this document do? +This document disables auto assignment of public IP addresses on a subnet. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 EC2.15](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-15)", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.15", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, 
len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) 
+ if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. 
+ self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": 
finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "SubnetARN", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-DisablePublicIPAutoAssign", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "SubnetARN": "{{ ParseInput.SubnetARN }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Disabled public IP auto assignment for subnet.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_EC2.15", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": 
"UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the EC2.15 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-DisablePublicIPAutoAssign", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_EC2.15", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC218DB9589DD": { + "Condition": "ControlRunbooksEnableEC218Condition903B1C90", + "DependsOn": [ + "CreateWait3", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.18 + +## What does this document do? +This document revokes inbound security group rules that allow unrestricted access to ports that are not authorized. +Authorized ports are listed in authorizedTcpPorts and authorizedUdpPorts parameters. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 EC2.18](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-18)", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.18", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:security-group/(sg-[0-9a-f]*)$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = 
configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def 
_get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + 
self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": 
finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "SecurityGroupId", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeScript", + "inputs": { + "Handler": "get_input_params", + "InputPayload": { + "DefaultParams": { + "authorizedTcpPorts": [ + "80", + "443", + ], + "authorizedUdpPorts": [], + }, + "SecHubInputParams": "{{ ParseInput.InputParams }}", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import re + + +def parse_non_string_types(param): + if re.match("^\\d+$", str(param)): + param = int(param) + return param + if param == "true" or param == "True": + return True + if param == "false" or param == "False": + return False + if isinstance(param, list): + return param + if len(param.split(",")) > 1: + return param.split(",") + return param + + +def get_input_params(event, _): + security_hub_input_params = event["SecHubInputParams"] + + default_params = event["DefaultParams"] + + input_params = {} + + for param in default_params: + if param in security_hub_input_params: + converted_param = parse_non_string_types(security_hub_input_params[param]) + input_params[param] = converted_param + else: + converted_param = parse_non_string_types(default_params[param]) + input_params[param] = converted_param + + return input_params +", + }, + "name": "GetInputParams", + "outputs": [ + { + "Name": "authorizedTcpPorts", + "Selector": "$.Payload.authorizedTcpPorts", + "Type": "StringList", + }, + { + "Name": "authorizedUdpPorts", + "Selector": "$.Payload.authorizedUdpPorts", + "Type": "StringList", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-RevokeUnauthorizedInboundRules", + "RuntimeParameters": { + "AuthorizedTcpPorts": "{{ GetInputParams.authorizedTcpPorts }}", + "AuthorizedUdpPorts": "{{ GetInputParams.authorizedUdpPorts }}", + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "SecurityGroupId": "{{ ParseInput.SecurityGroupId }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Revoked unrestricted inbound security group rules on unauthorized ports.", + "UpdatedBy": 
"ASR-NIST80053R5_5.0.0_EC2.18", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the EC2.18 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-RevokeUnauthorizedInboundRules", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_EC2.18", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC2197047C726": { + "Condition": "ControlRunbooksEnableEC219Condition2421DE99", + "DependsOn": [ + "CreateWait4", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.19 + +## What does this document do? +This document disables unrestricted access to high risk ports. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 EC2.19](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-19)", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.19", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:security-group/(sg-[0-9a-f]*)$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = 
configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def 
_get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + 
self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": 
finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "SecurityGroupId", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-DisableUnrestrictedAccessToHighRiskPorts", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "SecurityGroupId": "{{ ParseInput.SecurityGroupId }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Revoking access to high risk ports.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_EC2.19", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + 
"allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the EC2.19 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-DisableUnrestrictedAccessToHighRiskPorts", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_EC2.19", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC223EAFC5818": { + "Condition": "ControlRunbooksEnableEC223Condition795CB580", + "DependsOn": [ + "CreateWait4", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.23 + +## What does this document do? +This document turns off AutoAcceptSharedAttachments on a transit gateway to ensure that only authorized VPC attachment requests are accepted. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 EC2.23](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-23)", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.23", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):ec2:[a-z]{2}-[a-z]+-\\d{1}:\\d{12}:transit-gateway\\/(tgw-[a-z0-9\\-]+)$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = 
configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def 
_get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + 
self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": 
finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "TransitGatewayId", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-DisableTGWAutoAcceptSharedAttachments", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "TransitGatewayId": "{{ ParseInput.TransitGatewayId }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Disabling Transit Gateway from automatically accepting VPC attachment requests.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_EC2.23", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + 
"parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the EC2.23 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-DisableTGWAutoAcceptSharedAttachments", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_EC2.23", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC22ED852ADF": { + "Condition": "ControlRunbooksEnableEC22ConditionB9E0D42E", + "DependsOn": [ + "CreateWait2", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.2 + +## What does this document do? +This document deletes ingress and egress rules from default security +group using the AWS SSM Runbook AWSConfigRemediation-RemoveVPCDefaultSecurityGroupRules + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output - Output from AWSConfigRemediation-RemoveVPCDefaultSecurityGroupRules SSM doc + +## Documentation Links +* [AWS FSBP EC2.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-2) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.2", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:security-group\\/(sg-[0-9a-f]*)$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + 
config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = 
( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + 
self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, 
+ "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "GroupId", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-RemoveVPCDefaultSecurityGroupRules", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "GroupId": "{{ ParseInput.GroupId }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + 
"Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Removed rules on default security group", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_EC2.2", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the EC2.2 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-RemoveVPCDefaultSecurityGroupRules", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_EC2.2", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC247C182546": { + "Condition": "ControlRunbooksEnableEC24Condition72408A1B", + "DependsOn": [ + "CreateWait2", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.4 + +## What does this document do? +This document terminates an EC2 instance if it has been stopped for longer than the allowed number of days defined by the AllowedDays parameter. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 EC2.4](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-4)", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.4", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:instance\\/(i-[0-9a-f]*)$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = 
configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def 
_get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + 
self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": 
finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "InstanceId", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "AWS-TerminateEC2Instance", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "InstanceId": [ + "{{ ParseInput.InstanceId }}", + ], + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Terminated EC2 instance.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_EC2.4", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": 
"^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the EC2.4 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-TerminateEC2Instance", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_EC2.4", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC267E3087AE": { + "Condition": "ControlRunbooksEnableEC26ConditionF1F880B0", + "DependsOn": [ + "CreateWait2", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.6 + +## What does this document do? +Enables VPC Flow Logs for a VPC + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output - Remediation results + +## Documentation Links +* [AWS FSBP EC2.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-6) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.6", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):ec2:.*:\\d{12}:vpc\\/(vpc-[0-9a-f]{8,17})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, 
len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) 
+ if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. 
+ self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": 
finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "VPC", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableVPCFlowLogs", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "RemediationRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/SO0111-EnableVPCFlowLogs-remediationRole", + "VPC": "{{ ParseInput.VPC }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Enabled VPC Flow logging.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_EC2.6", + }, + "Service": "securityhub", + 
"Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the EC2.6 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableVPCFlowLogs", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_EC2.6", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC277719A4CD": { + "Condition": "ControlRunbooksEnableEC27ConditionC77CF056", + "DependsOn": [ + "CreateWait3", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.7 +## What does this document do? +This document enables \`EBS Encryption by default\` for an AWS account in the current region by calling another SSM document +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP EC2.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-7) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.7", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception 
as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + 
"RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data 
to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": 
finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableEbsEncryptionByDefault", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Enabled EBS encryption by default", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_EC2.7", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the 
Orchestrator Step function for the EC2.7 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableEbsEncryptionByDefault", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_EC2.7", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC287C39A9F1": { + "Condition": "ControlRunbooksEnableEC28Condition4C4640B8", + "DependsOn": [ + "CreateWait3", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.8 +## What does this document do? +This document enables IMDSv2 on an Instance for an AWS account in the current region by calling another SSM document. +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP EC2.8](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-8) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.8", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, 
len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) 
+ if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. 
+ self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": 
finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "InstanceARN", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableIMDSV2OnInstance", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "InstanceARN": "{{ ParseInput.InstanceARN }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Enabled IMDSv2 on Instance", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_EC2.8", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step 
function for the EC2.8 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableIMDSV2OnInstance", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_EC2.8", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksECR16DEF82C5": { + "Condition": "ControlRunbooksEnableECR1Condition70BCAF70", + "DependsOn": [ + "CreateWait4", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_ECR.1 + +## What does this document do? +This document enables image scanning configuration on a private ECR repository. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 ECR.1](https://docs.aws.amazon.com/securityhub/latest/userguide/ecr-controls.html#ecr-1)", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "ECR.1", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):ecr:[a-z]{2}-[a-z]+-\\d{1}:\\d{12}:repository\\/([a-z0-9._\\/\\-]+)$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, 
len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) 
+ if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. 
+ self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": 
finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "RepositoryName", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnablePrivateRepositoryScanning", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "RepositoryName": "{{ ParseInput.RepositoryName }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Enabling image scanning for private ECR repository.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_ECR.1", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + 
"description": "The input from the Orchestrator Step function for the ECR.1 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnablePrivateRepositoryScanning", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_ECR.1", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksGuardDuty15E0D2BEA": { + "Condition": "ControlRunbooksEnableGuardDuty1Condition97849740", + "DependsOn": [ + "CreateWait4", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_GuardDuty.1 + +## What does this document do? +This document enables GuardDuty. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 GuardDuty.1](https://docs.aws.amazon.com/securityhub/latest/userguide/guardduty-controls.html#guardduty-1)", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "GuardDuty.1", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, 
len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) 
+ if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. 
+ self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": 
finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableGuardDuty", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Amazon GuardDuty enabled.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_GuardDuty.1", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the GuardDuty.1 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": 
"SO0111-EnableGuardDuty", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_GuardDuty.1", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksIAM3DC25477E": { + "Condition": "ControlRunbooksEnableIAM3Condition3AA0E892", + "DependsOn": [ + "CreateWait4", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_IAM.3 + +## What does this document do? +This document disables active keys that have not been rotated for more than 90 days. Note that this remediation is **DISRUPTIVE**. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 IAM.3](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-3) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "IAM.3", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):iam::\\d{12}:user(?:(?:\\u002F)|(?:\\u002F[\\u0021-\\u007F]{1,510}\\u002F))([\\w+=,.@-]{1,64})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, 
len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) 
+ if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. 
+ self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": 
finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "IAMUser", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "IAMResourceId", + "Selector": "$.Payload.details.AwsIamUser.UserId", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-RevokeUnrotatedKeys", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "IAMResourceId": "{{ ParseInput.IAMResourceId }}", + "MaxCredentialUsageAge": "{{ MaxCredentialUsageAge }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Deactivated unrotated keys for {{ ParseInput.IAMUser }}.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_IAM.3", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + 
"description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the IAM.3 finding", + "type": "StringMap", + }, + "MaxCredentialUsageAge": { + "allowedPattern": "^(?:[1-9]\\d{0,3}|10000)$", + "default": "90", + "description": "(Required) Maximum number of days a key can be unrotated. The default value is 90 days.", + "type": "String", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-RevokeUnrotatedKeys", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_IAM.3", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksIAM70A808F7C": { + "Condition": "ControlRunbooksEnableIAM7ConditionDF8E776B", + "DependsOn": [ + "CreateWait5", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_IAM.7 + +## What does this document do? +This document establishes a default password policy. + +## Security Standards and Controls +* AWS FSBP IAM.7 + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP IAM.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-7) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "IAM.7", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception 
as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + 
"RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data 
to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": 
finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + ], + }, + { + "action": "aws:executeScript", + "inputs": { + "Handler": "get_input_params", + "InputPayload": { + "DefaultParams": { + "AllowUsersToChangePassword": "True", + "HardExpiry": "True", + "MaxPasswordAge": "90", + "MinimumPasswordLength": "14", + "PasswordReusePrevention": "24", + "RequireLowercaseCharacters": "True", + "RequireNumbers": "True", + "RequireSymbols": "True", + "RequireUppercaseCharacters": "True", + }, + "SecHubInputParams": "{{ ParseInput.InputParams }}", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import re + + +def parse_non_string_types(param): + if re.match("^\\d+$", str(param)): + param = int(param) + return param + if param == "true" or param == "True": + return True + if param == "false" or param == "False": + return False + if isinstance(param, list): + return param + if len(param.split(",")) > 1: + return param.split(",") + return param + + +def get_input_params(event, _): + security_hub_input_params = event["SecHubInputParams"] + + default_params = event["DefaultParams"] + + input_params = {} + + for param in default_params: + if param in security_hub_input_params: + converted_param = parse_non_string_types(security_hub_input_params[param]) + input_params[param] = converted_param + else: + converted_param = parse_non_string_types(default_params[param]) + input_params[param] = converted_param + + return input_params +", + }, + "name": "GetInputParams", + "outputs": [ + { + "Name": "AllowUsersToChangePassword", + "Selector": "$.Payload.AllowUsersToChangePassword", + "Type": "Boolean", + }, + { + "Name": "HardExpiry", + "Selector": "$.Payload.HardExpiry", + "Type": "Boolean", + }, + { + "Name": "MaxPasswordAge", + "Selector": "$.Payload.MaxPasswordAge", + "Type": "Integer", + }, + { + "Name": "MinimumPasswordLength", + "Selector": "$.Payload.MinimumPasswordLength", + "Type": "Integer", + }, + { + "Name": "RequireSymbols", + "Selector": "$.Payload.RequireSymbols", + "Type": "Boolean", + }, + { + "Name": "RequireNumbers", + "Selector": "$.Payload.RequireNumbers", + "Type": "Boolean", + }, + { + "Name": "RequireUppercaseCharacters", + "Selector": "$.Payload.RequireUppercaseCharacters", + "Type": "Boolean", + }, + { + "Name": "RequireLowercaseCharacters", + "Selector": "$.Payload.RequireLowercaseCharacters", + "Type": "Boolean", + }, + { + "Name": "PasswordReusePrevention", + "Selector": "$.Payload.PasswordReusePrevention", + "Type": "Integer", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + 
"DocumentName": "ASR-SetIAMPasswordPolicy", + "RuntimeParameters": { + "AllowUsersToChangePassword": "{{ GetInputParams.AllowUsersToChangePassword }}", + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "HardExpiry": "{{ GetInputParams.HardExpiry }}", + "MaxPasswordAge": "{{ GetInputParams.MaxPasswordAge }}", + "MinimumPasswordLength": "{{ GetInputParams.MinimumPasswordLength }}", + "PasswordReusePrevention": "{{ GetInputParams.PasswordReusePrevention }}", + "RequireLowercaseCharacters": "{{ GetInputParams.RequireLowercaseCharacters }}", + "RequireNumbers": "{{ GetInputParams.RequireNumbers }}", + "RequireSymbols": "{{ GetInputParams.RequireSymbols }}", + "RequireUppercaseCharacters": "{{ GetInputParams.RequireUppercaseCharacters }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Established a baseline password policy using the ASR-SetIAMPasswordPolicy runbook.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_IAM.7", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the IAM.7 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-SetIAMPasswordPolicy", + "type": "String", + }, + }, + "schemaVersion": "0.3", + 
}, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_IAM.7", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksIAM8632E03ED": { + "Condition": "ControlRunbooksEnableIAM8Condition9CA5CB4B", + "DependsOn": [ + "CreateWait5", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_IAM.8 + +## What does this document do? +This document ensures that credentials unused for 90 days or greater are disabled. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output - Output of remediation runbook + +SEE AWSConfigRemediation-RevokeUnusedIAMUserCredentials + +## Documentation Links +* [AWS FSBP IAM.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-8) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "IAM.8", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, 
len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) 
+ if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. 
+ self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": 
finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "IAMResourceId", + "Selector": "$.Payload.details.AwsIamUser.UserId", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-RevokeUnusedIAMUserCredentials", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "IAMResourceId": "{{ ParseInput.IAMResourceId }}", + "MaxCredentialUsageAge": "90", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Deactivated unused keys and expired logins using the ASR-RevokeUnusedIAMUserCredentials runbook.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_IAM.8", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to 
perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the IAM.8 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-RevokeUnusedIAMUserCredentials", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_IAM.8", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksKMS41A22BB8D": { + "Condition": "ControlRunbooksEnableKMS4Condition710C0C5C", + "DependsOn": [ + "CreateWait5", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-CIS_1.2.0_2.8 + +## What does this document do? +Enables rotation for customer-managed KMS keys. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output - Remediation results + +## Documentation Links +* [CIS v1.2.0 2.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-2.8) +* [PCI v3.2.1 PCI.KMS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-pci-controls.html#pcidss-kms-1) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "KMS.4", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:key\\/([A-Za-z0-9-]{36})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, 
len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) 
+ if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. 
+ self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": 
finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "KeyId", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableKeyRotation", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "KeyId": "{{ ParseInput.KeyId }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Enabled KMS Customer Managed Key rotation for {{ ParseInput.KeyId }}", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_KMS.4", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": 
"UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the KMS.4 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableKeyRotation", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_KMS.4", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksLambda1F6ECACF8": { + "Condition": "ControlRunbooksEnableLambda1Condition077CECAF", + "DependsOn": [ + "CreateWait5", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_Lambda.1 + +## What does this document do? +This document removes the public resource policy. A public resource policy +contains a principal "*" or AWS: "*", which allows public access to the +function. The remediation is to remove the SID of the public policy. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Documentation Links +* [AWS FSBP Lambda.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-lambda-1) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "Lambda.1", + ], + "parse_id_pattern": "^arn:(?:aws|aws-us-gov|aws-cn):lambda:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:function:([a-zA-Z0-9\\-_]{1,64})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = 
configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def 
_get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + 
self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": 
finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "FunctionName", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-RemoveLambdaPublicAccess", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "FunctionName": "{{ ParseInput.FunctionName }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn 
}}", + }, + ], + "Note": { + "Text": "Lamdba {{ ParseInput.FunctionName }} policy updated to remove public access", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_Lambda.1", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the Lambda.1 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-RemoveLambdaPublicAccess", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_Lambda.1", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksRDS13FCEA51BD": { + "Condition": "ControlRunbooksEnableRDS13Condition0E8A44B3", + "DependsOn": [ + "CreateWait7", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_RDS.13 + +## What does this document do? +This document enables \`Auto minor version upgrade\` on a given Amazon RDS instance by calling another SSM document. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output - The standard HTTP response from the ModifyDBInstance API. 
+ +## Documentation Links +* [AWS FSBP RDS.13](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-13) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "RDS.13", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting 
config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in 
self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation 
runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": 
finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + { + "Name": "DBInstanceIdentifier", + "Selector": "$.Payload.resource.Details.AwsRdsDbInstance.DBInstanceIdentifier", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableMinorVersionUpgradeOnRDSDBInstance", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "DBInstanceIdentifier": "{{ ParseInput.DBInstanceIdentifier }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Minor Version enabled on 
the RDS Instance or Multi-AZ RDS Cluster.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_RDS.13", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the RDS.13 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableMinorVersionUpgradeOnRDSDBInstance", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_RDS.13", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksRDS16EB04DCBF": { + "Condition": "ControlRunbooksEnableRDS16ConditionCB5C3E8F", + "DependsOn": [ + "CreateWait7", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_RDS.16 + +## What does this document do? +This document enables \`Copy tags to snapshots\` on a given Amazon RDS cluster by calling another SSM document. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output - The standard HTTP response from the ModifyDBCluster API. 
+ +## Documentation Links +* [AWS FSBP RDS.16](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-16) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "RDS.16", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting 
config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in 
self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation 
runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": 
finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + { + "Name": "DbiResourceId", + "Selector": "$.Payload.details.AwsRdsDbCluster.DbClusterResourceId", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableCopyTagsToSnapshotOnRDSCluster", + "RuntimeParameters": { + "ApplyImmediately": true, + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "DbClusterResourceId": "{{ ParseInput.DbiResourceId }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Copy Tags to Snapshots 
enabled on RDS DB cluster", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_RDS.16", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the RDS.16 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableCopyTagsToSnapshotOnRDSCluster", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_RDS.16", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksRDS1D73701E9": { + "Condition": "ControlRunbooksEnableRDS1ConditionFAE5B7EA", + "DependsOn": [ + "CreateWait5", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_RDS.1 +## What does this document do? +This document changes public RDS snapshot to private + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Documentation Links +* [AWS FSBP RDS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-1) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "RDS.1", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:(cluster-snapshot|snapshot):([a-zA-Z][0-9a-zA-Z]*(?:-[0-9a-zA-Z]+)*)$", + "resource_index": 2, + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + 
config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def 
_get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + 
self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": 
finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "DBSnapshotId", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + { + "Name": "DBSnapshotType", + "Selector": "$.Payload.matches[0]", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-MakeRDSSnapshotPrivate", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "DBSnapshotId": "{{ ParseInput.DBSnapshotId }}", + "DBSnapshotType": "{{ ParseInput.DBSnapshotType }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + 
"inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "RDS DB Snapshot modified to private", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_RDS.1", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the RDS.1 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-MakeRDSSnapshotPrivate", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_RDS.1", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksRDS2FBE04686": { + "Condition": "ControlRunbooksEnableRDS2Condition4FD00FE6", + "DependsOn": [ + "CreateWait6", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_RDS.2 +## What does this document do? +This document disables public access to RDS instances by calling another SSM document + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Documentation Links +* [AWS FSBP RDS.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-2) + +## Troubleshooting +* ModifyDBInstance isn't supported for a DB instance in a Multi-AZ DB Cluster. + - This remediation will not work on an instance within a MySQL or PostgreSQL Multi-AZ Cluster due to limitations with the RDS API. +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "RDS.2", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:db:((?!.*--.*)(?!.*-$)[a-z][a-z0-9-]{0,62})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + 
retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + 
match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + 
r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 
1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + { + "Name": "DbiResourceId", + "Selector": "$.Payload.resource.Details.AwsRdsDbInstance.DbiResourceId", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-DisablePublicAccessToRDSInstance", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "DbiResourceId": "{{ ParseInput.DbiResourceId }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ 
RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Disabled public access to RDS instance", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_RDS.2", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the RDS.2 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-DisablePublicAccessToRDSInstance", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_RDS.2", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksRDS4C82F2410": { + "Condition": "ControlRunbooksEnableRDS4Condition2E89346E", + "DependsOn": [ + "CreateWait6", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_RDS.4 + +## What does this document do? 
+This document encrypts an unencrypted RDS snapshot by calling another SSM document + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Optional) The ARN of the role that allows Automation to perform the actions on your behalf. +* RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. +* KMSKeyId: (Optional) ID, ARN or Alias for the AWS KMS Customer-Managed Key (CMK) to use to encrypt the snapshot. + +## Documentation Links +* [AWS FSBP RDS.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-4) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "RDS.4", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:((?:cluster-)?snapshot|dbclustersnapshot):((?:rds:|awsbackup:)?((?!.*--.*)(?!.*-$)[a-zA-Z][a-zA-Z0-9-]{0,254}))$", + "resource_index": 2, + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, 
len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) 
+ if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. 
+ self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": 
finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + { + "Name": "SourceDBSnapshotIdentifier", + "Selector": "$.Payload.matches[1]", + "Type": "String", + }, + { + "Name": "SourceDBSnapshotIdentifierNoPrefix", + "Selector": "$.Payload.matches[2]", + "Type": "String", + }, + { + "Name": "DBSnapshotType", + "Selector": "$.Payload.matches[0]", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EncryptRDSSnapshot", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "DBSnapshotType": "{{ ParseInput.DBSnapshotType }}", + "KmsKeyId": "{{ KMSKeyId }}", + "SourceDBSnapshotIdentifier": "{{ ParseInput.SourceDBSnapshotIdentifier }}", + "TargetDBSnapshotIdentifier": "{{ ParseInput.SourceDBSnapshotIdentifierNoPrefix }}-encrypted", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + 
"action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Encrypted RDS snapshot", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_RDS.4", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the RDS.4 finding", + "type": "StringMap", + }, + "KMSKeyId": { + "allowedPattern": "^(?:arn:(?:aws|aws-us-gov|aws-cn):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:)?(?:(?:alias\\/[A-Za-z0-9/_-]+)|(?:key\\/(?:[0-9a-fA-F]{8}-(?:[0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12})))$", + "default": "alias/aws/rds", + "description": "(Optional) ID, ARN or Alias for the AWS KMS Customer-Managed Key (CMK) to use to encrypt the snapshot.", + "type": "String", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EncryptRDSSnapshot", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_RDS.4", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksRDS5CECD9314": { + "Condition": "ControlRunbooksEnableRDS5ConditionEC2574C3", + "DependsOn": [ + "CreateWait6", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.RDS.5 + +## 
What does this document do? +This document configures an RDS DB instance for multiple Availability Zones by calling another SSM document. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Optional) The ARN of the role that allows Automation to perform the actions on your behalf. +* RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. + +## Documentation Links +* [AWS FSBP RDS.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-5) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "RDS.5", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + 
return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version 
= match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not 
re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + 
event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + { + "Name": "DbiResourceId", + "Selector": "$.Payload.resource.Details.AwsRdsDbInstance.DbiResourceId", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableMultiAZOnRDSInstance", + "RuntimeParameters": { + "ApplyImmediately": true, + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "DbiResourceId": "{{ ParseInput.DbiResourceId }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount 
}}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Configured RDS cluster for multiple Availability Zones", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_RDS.5", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the RDS.5 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableMultiAZOnRDSInstance", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_RDS.5", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksRDS6082B0D6B": { + "Condition": "ControlRunbooksEnableRDS6Condition4A60A39B", + "DependsOn": [ + "CreateWait6", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_RDS.6 + +## What does this document do? +This document enables \`Enhanced Monitoring\` on a given Amazon RDS instance by calling another SSM document. 
+ +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* VerifyRemediation.Output - The standard HTTP response from the ModifyDBInstance API. +## Documentation Links + +* [AWS FSBP RDS.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-6) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "RDS.6", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + 
user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if 
not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, 
+ ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": 
finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + { + "Name": "DbiResourceId", + "Selector": "$.Payload.resource.Details.AwsRdsDbInstance.DbiResourceId", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "GetRole", + "RoleName": "SO0111-RDSMonitoring-remediationRole", + "Service": "iam", + }, + "name": "GetMonitoringRoleArn", + "outputs": [ + { + "Name": "Arn", + "Selector": "$.Role.Arn", + "Type": "String", + }, + ], + "timeoutSeconds": 600, + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableEnhancedMonitoringOnRDSInstance", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ 
RemediationRoleName }}", + "MonitoringRoleArn": "{{ GetMonitoringRoleArn.Arn }}", + "ResourceId": "{{ ParseInput.DbiResourceId }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Enhanced Monitoring enabled on RDS DB cluster", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_RDS.6", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the RDS.6 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableEnhancedMonitoringOnRDSInstance", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_RDS.6", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksRDS715C0A01A": { + "Condition": "ControlRunbooksEnableRDS7ConditionE53509B0", + "DependsOn": [ + "CreateWait6", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document 
Name - ASR-AFSBP_1.0.0_RDS.7 + +## What does this document do? +This document enables \`Deletion Protection\` on a given Amazon RDS cluster by calling another SSM document. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output - The standard HTTP response from the ModifyDBCluster API. + +## Documentation Links +* [AWS FSBP RDS.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-7) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "RDS.7", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + 
"security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = 
get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + 
self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + 
event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + { + "Name": "DbiResourceId", + "Selector": "$.Payload.details.AwsRdsDbCluster.DbClusterResourceId", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableRDSClusterDeletionProtection", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "ClusterId": "{{ ParseInput.DbiResourceId }}", + }, + "TargetLocations": [ + { + "Accounts": [ + 
"{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Deletion protection enabled on RDS DB cluster", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_RDS.7", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the RDS.7 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableRDSClusterDeletionProtection", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_RDS.7", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksRDS89256480A": { + "Condition": "ControlRunbooksEnableRDS8Condition8F460AB5", + "DependsOn": [ + "CreateWait7", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.RDS.8 + +## What does this document do? +This document enables \`Deletion Protection\` on a given Amazon RDS cluster by calling another SSM document. 
+ +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Optional) The ARN of the role that allows Automation to perform the actions on your behalf. +* RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. + +## Documentation Links +* [AWS FSBP RDS.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-8) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "RDS.8", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + 
user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if 
not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, 
+ ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": 
finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + { + "Name": "DbiResourceId", + "Selector": "$.Payload.resource.Details.AwsRdsDbInstance.DbiResourceId", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableRDSInstanceDeletionProtection", + "RuntimeParameters": { + "ApplyImmediately": true, + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "DbInstanceResourceId": "{{ ParseInput.DbiResourceId }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, 
+ ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Enabled deletion protection on RDS instance", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_RDS.8", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the RDS.8 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableRDSInstanceDeletionProtection", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_RDS.8", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksRedshift1789871EB": { + "Condition": "ControlRunbooksEnableRedshift1Condition3449D560", + "DependsOn": [ + "CreateWait7", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_Redshift.1 + +## What does this document do? 
+This document disables public access to a Redshift cluster by calling another SSM document + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Optional) The ARN of the role that allows Automation to perform the actions on your behalf. +* RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. + +## Documentation Links +* [AWS FSBP Redshift.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-1) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "Redshift.1", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):redshift:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:cluster:(?!.*--)([a-z][a-z0-9-]{0,62})(? 1: + return param.split(",") + return param + + +def get_input_params(event, _): + security_hub_input_params = event["SecHubInputParams"] + + default_params = event["DefaultParams"] + + input_params = {} + + for param in default_params: + if param in security_hub_input_params: + converted_param = parse_non_string_types(security_hub_input_params[param]) + input_params[param] = converted_param + else: + converted_param = parse_non_string_types(default_params[param]) + input_params[param] = converted_param + + return input_params +", + }, + "name": "GetInputParams", + "outputs": [ + { + "Name": "eventTypes", + "Selector": "$.Payload.eventTypes", + "Type": "StringList", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableBucketEventNotifications", + "RuntimeParameters": { + "AccountId": "{{ ParseInput.RemediationAccount }}", + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "BucketName": "{{ ParseInput.BucketName }}", + "EventTypes": "{{ 
GetInputParams.eventTypes }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Configured event notifications to an S3 Bucket.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_S3.11", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the S3.11 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableBucketEventNotifications", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_S3.11", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksS311C5AAD45": { + "Condition": "ControlRunbooksEnableS31Condition25C33B3F", + "DependsOn": [ + "CreateWait8", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_S3.1 + +## What does this document do? +This document blocks public access to all buckets by default at the account level. 
+ +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 S3.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-1) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "S3.1", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", 
+ ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + 
self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = 
False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": 
finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-ConfigureS3PublicAccessBlock", + "RuntimeParameters": { + "AccountId": "{{ ParseInput.RemediationAccount }}", + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "BlockPublicAcls": true, + "BlockPublicPolicy": true, + "IgnorePublicAcls": true, + "RestrictPublicBuckets": true, + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Configured the account to block public S3 access.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_S3.1", + }, + "Service": "securityhub", + 
"Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the S3.1 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-ConfigureS3PublicAccessBlock", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_S3.1", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksS313756F060B": { + "Condition": "ControlRunbooksEnableS313ConditionA95162A4", + "DependsOn": [ + "CreateWait9", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_S3.13 + +## What does this document do? +This document sets an example lifecycle policy that transfers objects greater than 10 GB to S3 Intelligent Tiering after 90 days. +It is recommended to set lifecycle policies appropriate for the objects stored in your S3 bucket. + +## Input Parameters +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +* BucketName: (Required) Name of the S3 bucket. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 S3.11](https://docs.aws.amazon.com/securityhub/latest/userguide/s3-controls.html#s3-13) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "S3.13", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):s3:::([a-z0-9.-]{3,63})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + 
"ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that 
produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used 
with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + 
"object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "BucketName", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeScript", + "inputs": { + "Handler": "get_input_params", + "InputPayload": { + "DefaultParams": { + "targetExpirationDays": 0, + "targetTransitionDays": 30, + "targetTransitionStorageClass": "INTELLIGENT_TIERING", + }, + "SecHubInputParams": "{{ ParseInput.InputParams }}", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import re + + +def parse_non_string_types(param): + if re.match("^\\d+$", str(param)): + param = int(param) + return param + if param == "true" or param == "True": + return True + if param == "false" or param == "False": + return False + if isinstance(param, list): + return param + if len(param.split(",")) > 1: + return param.split(",") + return param + + +def get_input_params(event, _): + security_hub_input_params = event["SecHubInputParams"] + + default_params = event["DefaultParams"] + + input_params = {} + + for param in default_params: + if param in security_hub_input_params: + converted_param = parse_non_string_types(security_hub_input_params[param]) + input_params[param] = converted_param + else: + converted_param = parse_non_string_types(default_params[param]) + input_params[param] = converted_param + + return input_params +", + }, + "name": "GetInputParams", + "outputs": [ + { + "Name": "targetTransitionDays", + "Selector": "$.Payload.targetTransitionDays", + "Type": "Integer", + }, + { + "Name": "targetExpirationDays", + "Selector": "$.Payload.targetExpirationDays", + "Type": "Integer", + }, + { + "Name": "targetTransitionStorageClass", + "Selector": "$.Payload.targetTransitionStorageClass", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-SetS3LifecyclePolicy", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "BucketName": "{{ ParseInput.BucketName }}", + "TargetExpirationDays": "{{ GetInputParams.targetExpirationDays }}", + "TargetTransitionDays": "{{ GetInputParams.targetTransitionDays }}", + "TargetTransitionStorageClass": "{{ GetInputParams.targetTransitionStorageClass }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ 
ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Setting an example lifecycle policy on the S3 bucket.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_S3.13", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the S3.13 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-SetS3LifecyclePolicy", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_S3.13", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksS3260D6E897": { + "Condition": "ControlRunbooksEnableS32ConditionD6F8CCE9", + "DependsOn": [ + "CreateWait8", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_S3.2 + +## What does this document do? +This document blocks all public access to an S3 bucket. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 S3.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-2) +* [AWS FSBP v1.0.0 S3.3](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-3) +* [AWS FSBP v1.0.0 S3.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-8) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "S3.2", + "S3.3", + "S3.8", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):s3:::([A-Za-z0-9.-]{3,63})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def 
get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = 
match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + 
r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 
1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "BucketName", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-ConfigureS3BucketPublicAccessBlock", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "BlockPublicAcls": true, + "BlockPublicPolicy": true, + "BucketName": "{{ ParseInput.BucketName }}", + "IgnorePublicAcls": true, + "RestrictPublicBuckets": true, + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + 
"Text": "Disabled public access to S3 bucket.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_S3.2", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the S3.2 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-ConfigureS3BucketPublicAccessBlock", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_S3.2", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksS34F82DA9F1": { + "Condition": "ControlRunbooksEnableS34ConditionC23F6623", + "DependsOn": [ + "CreateWait8", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_S3.4 + +## What does this document do? +This document enables AES-256 as the default encryption for an S3 bucket. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 S3.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-4) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "S3.4", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):s3:::([A-Za-z0-9.-]{3,63})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = 
configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def 
_get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + 
self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": 
finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "BucketName", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableDefaultEncryptionS3", + "RuntimeParameters": { + "AccountId": "{{ ParseInput.RemediationAccount }}", + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "BucketName": "{{ ParseInput.BucketName }}", + "KmsKeyAlias": "{{ KmsKeyAlias }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Enabled default encryption for {{ ParseInput.BucketName }}", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_S3.4", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, 
+ "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the S3.4 finding", + "type": "StringMap", + }, + "KmsKeyAlias": { + "allowedPattern": "^$|^[a-zA-Z0-9/_-]{1,256}$", + "default": "{{ssm:/Solutions/SO0111/afsbp/1.0.0/S3.4/KmsKeyAlias}}", + "description": "(Required) KMS Customer-Managed Key (CMK) alias or the default value which is created in the SSM parameter at solution deployment (default-s3-encryption) is used to identify that the s3 bucket encryption value should be set to AES-256.", + "type": "String", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableDefaultEncryptionS3", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_S3.4", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksS356959B795": { + "Condition": "ControlRunbooksEnableS35ConditionD5E024B6", + "DependsOn": [ + "CreateWait9", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_S3.5 + +## What does this document do? +This document adds a bucket policy to restrict internet access to https only. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 S3.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-5) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "S3.5", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):s3:::([A-Za-z0-9.-]{3,63})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = 
configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def 
_get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + 
self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": 
finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "BucketName", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-SetSSLBucketPolicy", + "RuntimeParameters": { + "AccountId": "{{ ParseInput.RemediationAccount }}", + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "BucketName": "{{ ParseInput.BucketName }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Added SSL-only access policy to S3 bucket.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_S3.5", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + 
"outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the S3.5 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-SetSSLBucketPolicy", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_S3.5", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksS360762680A": { + "Condition": "ControlRunbooksEnableS36ConditionD22273E2", + "DependsOn": [ + "CreateWait9", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_S3.6 + +## What does this document do? +This document restricts cross-account access to a bucket in the local account. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 S3.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-6) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "S3.6", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):s3:::([A-Za-z0-9.-]{3,63})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = 
configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def 
_get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + 
self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": 
finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "BucketName", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "DenyListSerialized", + "Selector": "$.Payload.aws_config_rule.InputParameters", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeScript", + "inputs": { + "Handler": "runbook_handler", + "InputPayload": { + "SerializedList": "{{ ParseInput.DenyListSerialized }}", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import json + + +def runbook_handler(event, _): + try: + deserialized = json.loads(event["SerializedList"]) + if "blacklistedActionPattern" in deserialized: + return deserialized[ + "blacklistedActionPattern" + ] # Returns comma-delimited list in a string + else: + exit("Missing blacklistedActionPattern in AWS Config data") + except Exception as e: + print(e) + exit( + "Failed getting comma-delimited string list of sensitive API calls input data" + ) +", + }, + "name": "ExtractSensitiveApis", + "outputs": [ + { + "Name": "ListOfApis", + "Selector": "$.Payload", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-S3BlockDenylist", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "BucketName": "{{ ParseInput.BucketName }}", + "DenyList": "{{ ExtractSensitiveApis.ListOfApis }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Added explicit deny for sensitive bucket access from another account.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_S3.6", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the S3.6 finding", + "type": "StringMap", + }, + 
"RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-S3BlockDenylist", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_S3.6", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksS394FBE32FA": { + "Condition": "ControlRunbooksEnableS39Condition7705D6AD", + "DependsOn": [ + "CreateWait9", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-CIS_1.2.0_2.6 + +## What does this document do? +Configures access logging for a CloudTrail S3 bucket. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output - Remediation results + +## Documentation Links +* [CIS v1.2.0 2.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-2.6) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "S3.9", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):s3:::([A-Za-z0-9.-]{3,63})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, 
len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) 
+ if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. 
+ self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": 
finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "BucketName", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-CreateAccessLoggingBucket", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/SO0111-CreateAccessLoggingBucket", + "BucketName": "so0111-cloudtrailaccesslogs-{{ global:ACCOUNT_ID }}-{{ global:REGION }}", + }, + }, + "name": "CreateAccessLoggingBucket", + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "AWS-ConfigureS3BucketLogging", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "BucketName": "{{ ParseInput.BucketName }}", + "GrantedPermission": [ + "READ", + ], + "GranteeType": [ + "Group", + ], + "GranteeUri": [ + "http://acs.amazonaws.com/groups/s3/LogDelivery", + ], + "TargetBucket": [ + "so0111-cloudtrailaccesslogs-{{ global:ACCOUNT_ID }}-{{ global:REGION }}", + ], + "TargetPrefix": [ + "{{ ParseInput.BucketName }}", + ], + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId 
}}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Created S3 bucket so0111-cloudtrailaccesslogs-{{ global:ACCOUNT_ID }}-{{ global:REGION }} for logging access to {{ ParseInput.BucketName }}", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_S3.9", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the S3.9 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-ConfigureS3BucketLogging", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_S3.9", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksSNS145784CBB": { + "Condition": "ControlRunbooksEnableSNS1Condition7720D1CC", + "DependsOn": [ + "CreateWait10", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": " ### Document Name - ASR-AFSBP_1.0.0_SNS.1 + + ## What does this document do? + This document enables encryption at rest using AWS KMS for SNS topics. + + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ + ## Output Parameters + * Remediation.Output + + ## Documentation Links + * [AWS FSBP v1.0.0 SNS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-sns-1)", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "SNS.1", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + 
except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + 
"RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data 
to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": 
finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "TopicArn", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableEncryptionForSNSTopic", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "KmsKeyArn": "{{ KmsKeyArn }}", + "TopicArn": "{{ ParseInput.TopicArn }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Encryption enabled on SNS Topic", + 
"UpdatedBy": "ASR-NIST80053R5_5.0.0_SNS.1", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the SNS.1 finding", + "type": "StringMap", + }, + "KmsKeyArn": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:(?:(?:alias\\/[A-Za-z0-9/-_])|(?:key\\/(?:[0-9a-fA-F]{8}-(?:[0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12})))$", + "default": "{{ssm:/Solutions/SO0111/CMK_REMEDIATION_ARN}}", + "type": "String", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableEncryptionForSNSTopic", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_SNS.1", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksSNS2112179CC": { + "Condition": "ControlRunbooksEnableSNS2Condition69621468", + "DependsOn": [ + "CreateWait10", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": " ### Document Name - ASR-AFSBP_1.0.0_SNS.2 + + ## What does this document do? + This document enables logging of delivery status for notification messages sent to a topic. + + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ + ## Output Parameters + * Remediation.Output + + ## Documentation Links + * [AWS FSBP v1.0.0 SNS.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-sns-2)", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "SNS.2", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + 
except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + 
"RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data 
to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": 
finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "SNSTopicArn", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableDeliveryStatusLoggingForSNSTopic", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "LoggingRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/SO0111-SNS2DeliveryStatusLogging-remediationRole", + "SNSTopicArn": "{{ ParseInput.SNSTopicArn }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId 
}}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Delivery Status Logging enabled on SNS Topic", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_SNS.2", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the SNS.2 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableDeliveryStatusLoggingForSNSTopic", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_SNS.2", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksSQS173AA7C81": { + "Condition": "ControlRunbooksEnableSQS1Condition3065B4F2", + "DependsOn": [ + "CreateWait11", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_SQS.1 + +## What does this document do? +This document enables encryption at rest using AWS KMS for SQS Queues. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 SQS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-sqs-1) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "SQS.1", + ], + "parse_id_pattern": "^arn:(?:aws|aws-us-gov|aws-cn):sqs:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:([a-zA-Z0-9_-]{1,80}(?:\\.fifo)?)$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + 
config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def 
_get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + 
self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": 
finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "SQSQueueName", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableEncryptionForSQSQueue", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "KmsKeyArn": "{{ KmsKeyArn }}", + "SQSQueueName": "{{ ParseInput.SQSQueueName }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + 
"ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Encryption enabled on SQS Topic", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_SQS.1", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the SQS.1 finding", + "type": "StringMap", + }, + "KmsKeyArn": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:(?:(?:alias\\/[A-Za-z0-9/-_])|(?:key\\/(?:[0-9a-fA-F]{8}-(?:[0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12})))$", + "default": "{{ssm:/Solutions/SO0111/CMK_REMEDIATION_ARN}}", + "type": "String", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableEncryptionForSQSQueue", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_SQS.1", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksSSM442CDFB67": { + "Condition": "ControlRunbooksEnableSSM4ConditionD47FCFB5", + "DependsOn": [ + "CreateWait11", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_SSM.4 + +## What does this document do? +This document modifies SSM document permissions to prevent cross-account public access. + +## Input Parameters +* DocumentArn: (Required) SSM Document ARN that will be changed. 
+* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 SSM.4](https://docs.aws.amazon.com/securityhub/latest/userguide/ssm-controls.html#ssm-4)", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "SSM.4", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = 
configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def 
_get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + 
self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": 
finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "DocumentArn", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-BlockSSMDocumentPublicAccess", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "DocumentArn": "{{ ParseInput.DocumentArn }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ 
ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "SSM document changed from public to private", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_SSM.4", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the SSM.4 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-BlockSSMDocumentPublicAccess", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_SSM.4", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksSecretsManager13D89C735": { + "Condition": "ControlRunbooksEnableSecretsManager1ConditionCE635AAF", + "DependsOn": [ + "CreateWait10", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_SecretsManager.1 + +## What does this document do? +This document enables automatic rotation on a Secrets Manager secret if a Lambda function is already associated with it. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 SecretsManager.1](https://docs.aws.amazon.com/securityhub/latest/userguide/secretsmanager-controls.html#secretsmanager-1)", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "SecretsManager.1", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + 
)[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + 
if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated 
finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": 
finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "SecretARN", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeScript", + "inputs": { + "Handler": "get_input_params", + "InputPayload": { + "DefaultParams": { + "maximumAllowedRotationFrequency": 90, + }, + "SecHubInputParams": "{{ ParseInput.InputParams }}", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import re + + +def parse_non_string_types(param): + if re.match("^\\d+$", str(param)): + param = int(param) + return param + if param == "true" or param == "True": + return True + if param == "false" or param == "False": + return False + if isinstance(param, list): + return param + if len(param.split(",")) > 1: + return param.split(",") + return param + + +def get_input_params(event, _): + security_hub_input_params = event["SecHubInputParams"] + + default_params = event["DefaultParams"] + + input_params = {} + + for param in default_params: + if param in security_hub_input_params: + converted_param = parse_non_string_types(security_hub_input_params[param]) + input_params[param] = converted_param + else: + converted_param = parse_non_string_types(default_params[param]) + input_params[param] = converted_param + + return input_params +", + }, + "name": "GetInputParams", + "outputs": [ + { + "Name": "maximumAllowedRotationFrequency", + "Selector": "$.Payload.maximumAllowedRotationFrequency", + "Type": "Integer", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableAutoSecretRotation", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "MaximumAllowedRotationFrequency": "{{ GetInputParams.maximumAllowedRotationFrequency }}", + "SecretARN": "{{ ParseInput.SecretARN }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Enabled automatic rotation on secret and set 
schedule to 90 days.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_SecretsManager.1", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the SecretsManager.1 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableAutoSecretRotation", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_SecretsManager.1", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksSecretsManager313120200": { + "Condition": "ControlRunbooksEnableSecretsManager3Condition04E1FFBB", + "DependsOn": [ + "CreateWait10", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_SecretsManager.3 + +## What does this document do? +This document deletes a secret that has been unused for the number of days specified in the unusedForDays parameter (Default: 90 days). + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +* SecretARN: (Required) The ARN of the Secrets Manager secret. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 SecretsManager.3](https://docs.aws.amazon.com/securityhub/latest/userguide/secretsmanager-controls.html#secretsmanager-3)", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "SecretsManager.3", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + 
)[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + 
if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated 
finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": 
finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "SecretARN", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeScript", + "inputs": { + "Handler": "get_input_params", + "InputPayload": { + "DefaultParams": { + "unusedForDays": 90, + }, + "SecHubInputParams": "{{ ParseInput.InputParams }}", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import re + + +def parse_non_string_types(param): + if re.match("^\\d+$", str(param)): + param = int(param) + return param + if param == "true" or param == "True": + return True + if param == "false" or param == "False": + return False + if isinstance(param, list): + return param + if len(param.split(",")) > 1: + return param.split(",") + return param + + +def get_input_params(event, _): + security_hub_input_params = event["SecHubInputParams"] + + default_params = event["DefaultParams"] + + input_params = {} + + for param in default_params: + if param in security_hub_input_params: + converted_param = parse_non_string_types(security_hub_input_params[param]) + input_params[param] = converted_param + else: + converted_param = parse_non_string_types(default_params[param]) + input_params[param] = converted_param + + return input_params +", + }, + "name": "GetInputParams", + "outputs": [ + { + "Name": "UnusedForDays", + "Selector": "$.Payload.unusedForDays", + "Type": "StringList", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-RemoveUnusedSecret", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "SecretARN": "{{ ParseInput.SecretARN }}", + "UnusedForDays": "{{ GetInputParams.UnusedForDays }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Removed the unused secret.", + "UpdatedBy": "ASR-NIST80053R5_5.0.0_SecretsManager.3", + }, + "Service": "securityhub", + 
"Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the SecretsManager.3 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-RemoveUnusedSecret", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_SecretsManager.3", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksSecretsManager42177BC07": { + "Condition": "ControlRunbooksEnableSecretsManager4ConditionCE71F44A", + "DependsOn": [ + "CreateWait10", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_SecretsManager.4 + +## What does this document do? +This document rotates a secret and sets its rotation period to 90 days. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 SecretsManager.4](https://docs.aws.amazon.com/securityhub/latest/userguide/secretsmanager-controls.html#secretsmanager-4)", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "SecretsManager.4", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + 
)[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + 
if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated 
finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": 
finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "SecretARN", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeScript", + "inputs": { + "Handler": "get_input_params", + "InputPayload": { + "DefaultParams": { + "maxDaysSinceRotation": 90, + }, + "SecHubInputParams": "{{ ParseInput.InputParams }}", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import re + + +def parse_non_string_types(param): + if re.match("^\\d+$", str(param)): + param = int(param) + return param + if param == "true" or param == "True": + return True + if param == "false" or param == "False": + return False + if isinstance(param, list): + return param + if len(param.split(",")) > 1: + return param.split(",") + return param + + +def get_input_params(event, _): + security_hub_input_params = event["SecHubInputParams"] + + default_params = event["DefaultParams"] + + input_params = {} + + for param in default_params: + if param in security_hub_input_params: + converted_param = parse_non_string_types(security_hub_input_params[param]) + input_params[param] = converted_param + else: + converted_param = parse_non_string_types(default_params[param]) + input_params[param] = converted_param + + return input_params +", + }, + "name": "GetInputParams", + "outputs": [ + { + "Name": "MaxDaysSinceRotation", + "Selector": "$.Payload.maxDaysSinceRotation", + "Type": "StringList", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-UpdateSecretRotationPeriod", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "MaxDaysSinceRotation": "{{ GetInputParams.MaxDaysSinceRotation }}", + "SecretARN": "{{ ParseInput.SecretARN }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Rotated secret and set rotation schedule to 90 days.", + "UpdatedBy": 
"ASR-NIST80053R5_5.0.0_SecretsManager.4", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the SecretsManager.4 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-UpdateSecretRotationPeriod", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-NIST80053R5_5.0.0_SecretsManager.4", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "CreateWait0": { + "DeletionPolicy": "Delete", + "Properties": { + "CreateIntervalSeconds": 1, + "DeleteIntervalSeconds": 0, + "DocumentPropertiesHash": "8433de50c0ce2a1e8ddfeff81d8a9817ec415aac639f4dbc20cc84f2454adf9f", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 1, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "CreateWait1": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "CreateWait0", + ], + "Properties": { + "CreateIntervalSeconds": 1, + "DeleteIntervalSeconds": 0, + "DocumentPropertiesHash": "a5d59741b6b3bdc8f9bfec6c2d56983b9c1e847e285a688afa156f8fe3b66bee", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 1, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "CreateWait10": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "CreateWait9", + ], 
+ "Properties": { + "CreateIntervalSeconds": 1, + "DeleteIntervalSeconds": 0, + "DocumentPropertiesHash": "89c8012cd3b0bc1b6a71febbfaa7aa8c42cb03f7003bda451f2921daca7de866", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 1, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "CreateWait11": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "CreateWait10", + ], + "Properties": { + "CreateIntervalSeconds": 1, + "DeleteIntervalSeconds": 0, + "DocumentPropertiesHash": "5fddc52e529312dfdfbe1aeacd6ff8992979818bb88718a02f61709be6d87c48", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 1, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "CreateWait2": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "CreateWait1", + ], + "Properties": { + "CreateIntervalSeconds": 1, + "DeleteIntervalSeconds": 0, + "DocumentPropertiesHash": "b92108469a05a2ea0831d76a91ac975422acce2af642ed0379424d05e8743a92", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 1, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "CreateWait3": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "CreateWait2", + ], + "Properties": { + "CreateIntervalSeconds": 1, + "DeleteIntervalSeconds": 0, + "DocumentPropertiesHash": "bd52e7817a2acae7258b13c4142b1a45a8980b49572db8cfef5ecf9ec5ebeb74", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 1, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "CreateWait4": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "CreateWait3", + ], + "Properties": { + "CreateIntervalSeconds": 1, + "DeleteIntervalSeconds": 0, + "DocumentPropertiesHash": "14f9793c1aae3088a32ff91c3a3ef9b64d9b9bd31dbda0f92cc9bffccd32618e", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 1, + }, + "Type": "Custom::Wait", 
+ "UpdateReplacePolicy": "Delete", + }, + "CreateWait5": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "CreateWait4", + ], + "Properties": { + "CreateIntervalSeconds": 1, + "DeleteIntervalSeconds": 0, + "DocumentPropertiesHash": "b1302b459c06ac59a2e964912ba0ad4080968090b895cc31666f70264f5d8b96", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 1, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "CreateWait6": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "CreateWait5", + ], + "Properties": { + "CreateIntervalSeconds": 1, + "DeleteIntervalSeconds": 0, + "DocumentPropertiesHash": "7d3c79ad721baf94c901a74a8490e4f2744fe35928e0e4cb107a9ca754a389b5", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 1, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "CreateWait7": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "CreateWait6", + ], + "Properties": { + "CreateIntervalSeconds": 1, + "DeleteIntervalSeconds": 0, + "DocumentPropertiesHash": "bfa4fd2f2c68bad7ea0c7fe712c7a38eb3c36d9f3fc97656cd67b8fe348c03f5", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 1, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "CreateWait8": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "CreateWait7", + ], + "Properties": { + "CreateIntervalSeconds": 1, + "DeleteIntervalSeconds": 0, + "DocumentPropertiesHash": "0f501814de8099cae3482fd37a5e5993ae8e4d30a678616fe52a700b57f1c6c4", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 1, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "CreateWait9": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "CreateWait8", + ], + "Properties": { + "CreateIntervalSeconds": 1, + "DeleteIntervalSeconds": 0, + "DocumentPropertiesHash": 
"59fc8ecdc630b4a2bb594437f74f8d68a80fd0629161510d4ab8d7bda121a9cb", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 1, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "DeletWait0": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "Gate0", + ], + "Properties": { + "CreateIntervalSeconds": 0, + "DeleteIntervalSeconds": 0.5, + "DocumentPropertiesHash": "8433de50c0ce2a1e8ddfeff81d8a9817ec415aac639f4dbc20cc84f2454adf9f", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 0, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "DeletWait1": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "DeletWait0", + "Gate1", + ], + "Properties": { + "CreateIntervalSeconds": 0, + "DeleteIntervalSeconds": 0.5, + "DocumentPropertiesHash": "a5d59741b6b3bdc8f9bfec6c2d56983b9c1e847e285a688afa156f8fe3b66bee", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 0, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "DeletWait10": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "DeletWait9", + "Gate10", + ], + "Properties": { + "CreateIntervalSeconds": 0, + "DeleteIntervalSeconds": 0.5, + "DocumentPropertiesHash": "89c8012cd3b0bc1b6a71febbfaa7aa8c42cb03f7003bda451f2921daca7de866", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 0, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "DeletWait11": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "DeletWait10", + "Gate11", + ], + "Properties": { + "CreateIntervalSeconds": 0, + "DeleteIntervalSeconds": 0.5, + "DocumentPropertiesHash": "5fddc52e529312dfdfbe1aeacd6ff8992979818bb88718a02f61709be6d87c48", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 0, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "DeletWait2": { + 
"DeletionPolicy": "Delete", + "DependsOn": [ + "DeletWait1", + "Gate2", + ], + "Properties": { + "CreateIntervalSeconds": 0, + "DeleteIntervalSeconds": 0.5, + "DocumentPropertiesHash": "b92108469a05a2ea0831d76a91ac975422acce2af642ed0379424d05e8743a92", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 0, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "DeletWait3": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "DeletWait2", + "Gate3", + ], + "Properties": { + "CreateIntervalSeconds": 0, + "DeleteIntervalSeconds": 0.5, + "DocumentPropertiesHash": "bd52e7817a2acae7258b13c4142b1a45a8980b49572db8cfef5ecf9ec5ebeb74", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 0, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "DeletWait4": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "DeletWait3", + "Gate4", + ], + "Properties": { + "CreateIntervalSeconds": 0, + "DeleteIntervalSeconds": 0.5, + "DocumentPropertiesHash": "14f9793c1aae3088a32ff91c3a3ef9b64d9b9bd31dbda0f92cc9bffccd32618e", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 0, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "DeletWait5": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "DeletWait4", + "Gate5", + ], + "Properties": { + "CreateIntervalSeconds": 0, + "DeleteIntervalSeconds": 0.5, + "DocumentPropertiesHash": "b1302b459c06ac59a2e964912ba0ad4080968090b895cc31666f70264f5d8b96", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 0, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "DeletWait6": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "DeletWait5", + "Gate6", + ], + "Properties": { + "CreateIntervalSeconds": 0, + "DeleteIntervalSeconds": 0.5, + "DocumentPropertiesHash": "7d3c79ad721baf94c901a74a8490e4f2744fe35928e0e4cb107a9ca754a389b5", + 
"ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 0, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "DeletWait7": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "DeletWait6", + "Gate7", + ], + "Properties": { + "CreateIntervalSeconds": 0, + "DeleteIntervalSeconds": 0.5, + "DocumentPropertiesHash": "bfa4fd2f2c68bad7ea0c7fe712c7a38eb3c36d9f3fc97656cd67b8fe348c03f5", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 0, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "DeletWait8": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "DeletWait7", + "Gate8", + ], + "Properties": { + "CreateIntervalSeconds": 0, + "DeleteIntervalSeconds": 0.5, + "DocumentPropertiesHash": "0f501814de8099cae3482fd37a5e5993ae8e4d30a678616fe52a700b57f1c6c4", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 0, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "DeletWait9": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "DeletWait8", + "Gate9", + ], + "Properties": { + "CreateIntervalSeconds": 0, + "DeleteIntervalSeconds": 0.5, + "DocumentPropertiesHash": "59fc8ecdc630b4a2bb594437f74f8d68a80fd0629161510d4ab8d7bda121a9cb", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 0, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "Gate0": { + "Metadata": { + "ControlRunbooksAutoScaling1BA109277Ready": { + "Fn::If": [ + "ControlRunbooksEnableAutoScaling1ConditionD5DF4981", + { + "Ref": "ControlRunbooksAutoScaling1BA109277", + }, + "", + ], + }, + "ControlRunbooksCloudFormation12CB945DBReady": { + "Fn::If": [ + "ControlRunbooksEnableCloudFormation1ConditionD8D32097", + { + "Ref": "ControlRunbooksCloudFormation12CB945DB", + }, + "", + ], + }, + "ControlRunbooksCloudFront116F66FF8Ready": { + "Fn::If": [ + 
"ControlRunbooksEnableCloudFront1ConditionD78B5553", + { + "Ref": "ControlRunbooksCloudFront116F66FF8", + }, + "", + ], + }, + "ControlRunbooksCloudFront1283E53E96Ready": { + "Fn::If": [ + "ControlRunbooksEnableCloudFront12Condition59835E00", + { + "Ref": "ControlRunbooksCloudFront1283E53E96", + }, + "", + ], + }, + "ControlRunbooksCloudTrail1B15F1A13Ready": { + "Fn::If": [ + "ControlRunbooksEnableCloudTrail1ConditionB7EBAA86", + { + "Ref": "ControlRunbooksCloudTrail1B15F1A13", + }, + "", + ], + }, + }, + "Type": "AWS::CloudFormation::WaitConditionHandle", + }, + "Gate1": { + "Metadata": { + "ControlRunbooksCloudTrail2979D0B5DReady": { + "Fn::If": [ + "ControlRunbooksEnableCloudTrail2ConditionC182A10F", + { + "Ref": "ControlRunbooksCloudTrail2979D0B5D", + }, + "", + ], + }, + "ControlRunbooksCloudTrail4057F669FReady": { + "Fn::If": [ + "ControlRunbooksEnableCloudTrail4Condition587734A2", + { + "Ref": "ControlRunbooksCloudTrail4057F669F", + }, + "", + ], + }, + "ControlRunbooksCloudTrail54F5ED8E4Ready": { + "Fn::If": [ + "ControlRunbooksEnableCloudTrail5Condition17B6B536", + { + "Ref": "ControlRunbooksCloudTrail54F5ED8E4", + }, + "", + ], + }, + "ControlRunbooksCodeBuild2A2751671Ready": { + "Fn::If": [ + "ControlRunbooksEnableCodeBuild2ConditionB01F473D", + { + "Ref": "ControlRunbooksCodeBuild2A2751671", + }, + "", + ], + }, + "ControlRunbooksCodeBuild509682556Ready": { + "Fn::If": [ + "ControlRunbooksEnableCodeBuild5Condition5FF93A0A", + { + "Ref": "ControlRunbooksCodeBuild509682556", + }, + "", + ], + }, + }, + "Type": "AWS::CloudFormation::WaitConditionHandle", + }, + "Gate10": { + "Metadata": { + "ControlRunbooksSNS145784CBBReady": { + "Fn::If": [ + "ControlRunbooksEnableSNS1Condition7720D1CC", + { + "Ref": "ControlRunbooksSNS145784CBB", + }, + "", + ], + }, + "ControlRunbooksSNS2112179CCReady": { + "Fn::If": [ + "ControlRunbooksEnableSNS2Condition69621468", + { + "Ref": "ControlRunbooksSNS2112179CC", + }, + "", + ], + }, + 
"ControlRunbooksSecretsManager13D89C735Ready": { + "Fn::If": [ + "ControlRunbooksEnableSecretsManager1ConditionCE635AAF", + { + "Ref": "ControlRunbooksSecretsManager13D89C735", + }, + "", + ], + }, + "ControlRunbooksSecretsManager313120200Ready": { + "Fn::If": [ + "ControlRunbooksEnableSecretsManager3Condition04E1FFBB", + { + "Ref": "ControlRunbooksSecretsManager313120200", + }, + "", + ], + }, + "ControlRunbooksSecretsManager42177BC07Ready": { + "Fn::If": [ + "ControlRunbooksEnableSecretsManager4ConditionCE71F44A", + { + "Ref": "ControlRunbooksSecretsManager42177BC07", + }, + "", + ], + }, + }, + "Type": "AWS::CloudFormation::WaitConditionHandle", + }, + "Gate11": { + "Metadata": { + "ControlRunbooksSQS173AA7C81Ready": { + "Fn::If": [ + "ControlRunbooksEnableSQS1Condition3065B4F2", + { + "Ref": "ControlRunbooksSQS173AA7C81", + }, + "", + ], + }, + "ControlRunbooksSSM442CDFB67Ready": { + "Fn::If": [ + "ControlRunbooksEnableSSM4ConditionD47FCFB5", + { + "Ref": "ControlRunbooksSSM442CDFB67", + }, + "", + ], + }, + }, + "Type": "AWS::CloudFormation::WaitConditionHandle", + }, + "Gate2": { + "Metadata": { + "ControlRunbooksConfig1512B566FReady": { + "Fn::If": [ + "ControlRunbooksEnableConfig1Condition8CEB8627", + { + "Ref": "ControlRunbooksConfig1512B566F", + }, + "", + ], + }, + "ControlRunbooksEC214D3BB404Ready": { + "Fn::If": [ + "ControlRunbooksEnableEC21ConditionD4F1277B", + { + "Ref": "ControlRunbooksEC214D3BB404", + }, + "", + ], + }, + "ControlRunbooksEC22ED852ADFReady": { + "Fn::If": [ + "ControlRunbooksEnableEC22ConditionB9E0D42E", + { + "Ref": "ControlRunbooksEC22ED852ADF", + }, + "", + ], + }, + "ControlRunbooksEC247C182546Ready": { + "Fn::If": [ + "ControlRunbooksEnableEC24Condition72408A1B", + { + "Ref": "ControlRunbooksEC247C182546", + }, + "", + ], + }, + "ControlRunbooksEC267E3087AEReady": { + "Fn::If": [ + "ControlRunbooksEnableEC26ConditionF1F880B0", + { + "Ref": "ControlRunbooksEC267E3087AE", + }, + "", + ], + }, + }, + "Type": 
"AWS::CloudFormation::WaitConditionHandle", + }, + "Gate3": { + "Metadata": { + "ControlRunbooksEC213D7C9C1EBReady": { + "Fn::If": [ + "ControlRunbooksEnableEC213Condition567EA275", + { + "Ref": "ControlRunbooksEC213D7C9C1EB", + }, + "", + ], + }, + "ControlRunbooksEC2153B43E7A8Ready": { + "Fn::If": [ + "ControlRunbooksEnableEC215Condition52A7DE4B", + { + "Ref": "ControlRunbooksEC2153B43E7A8", + }, + "", + ], + }, + "ControlRunbooksEC218DB9589DDReady": { + "Fn::If": [ + "ControlRunbooksEnableEC218Condition903B1C90", + { + "Ref": "ControlRunbooksEC218DB9589DD", + }, + "", + ], + }, + "ControlRunbooksEC277719A4CDReady": { + "Fn::If": [ + "ControlRunbooksEnableEC27ConditionC77CF056", + { + "Ref": "ControlRunbooksEC277719A4CD", + }, + "", + ], + }, + "ControlRunbooksEC287C39A9F1Ready": { + "Fn::If": [ + "ControlRunbooksEnableEC28Condition4C4640B8", + { + "Ref": "ControlRunbooksEC287C39A9F1", + }, + "", + ], + }, + }, + "Type": "AWS::CloudFormation::WaitConditionHandle", + }, + "Gate4": { + "Metadata": { + "ControlRunbooksEC2197047C726Ready": { + "Fn::If": [ + "ControlRunbooksEnableEC219Condition2421DE99", + { + "Ref": "ControlRunbooksEC2197047C726", + }, + "", + ], + }, + "ControlRunbooksEC223EAFC5818Ready": { + "Fn::If": [ + "ControlRunbooksEnableEC223Condition795CB580", + { + "Ref": "ControlRunbooksEC223EAFC5818", + }, + "", + ], + }, + "ControlRunbooksECR16DEF82C5Ready": { + "Fn::If": [ + "ControlRunbooksEnableECR1Condition70BCAF70", + { + "Ref": "ControlRunbooksECR16DEF82C5", + }, + "", + ], + }, + "ControlRunbooksGuardDuty15E0D2BEAReady": { + "Fn::If": [ + "ControlRunbooksEnableGuardDuty1Condition97849740", + { + "Ref": "ControlRunbooksGuardDuty15E0D2BEA", + }, + "", + ], + }, + "ControlRunbooksIAM3DC25477EReady": { + "Fn::If": [ + "ControlRunbooksEnableIAM3Condition3AA0E892", + { + "Ref": "ControlRunbooksIAM3DC25477E", + }, + "", + ], + }, + }, + "Type": "AWS::CloudFormation::WaitConditionHandle", + }, + "Gate5": { + "Metadata": { + 
"ControlRunbooksIAM70A808F7CReady": { + "Fn::If": [ + "ControlRunbooksEnableIAM7ConditionDF8E776B", + { + "Ref": "ControlRunbooksIAM70A808F7C", + }, + "", + ], + }, + "ControlRunbooksIAM8632E03EDReady": { + "Fn::If": [ + "ControlRunbooksEnableIAM8Condition9CA5CB4B", + { + "Ref": "ControlRunbooksIAM8632E03ED", + }, + "", + ], + }, + "ControlRunbooksKMS41A22BB8DReady": { + "Fn::If": [ + "ControlRunbooksEnableKMS4Condition710C0C5C", + { + "Ref": "ControlRunbooksKMS41A22BB8D", + }, + "", + ], + }, + "ControlRunbooksLambda1F6ECACF8Ready": { + "Fn::If": [ + "ControlRunbooksEnableLambda1Condition077CECAF", + { + "Ref": "ControlRunbooksLambda1F6ECACF8", + }, + "", + ], + }, + "ControlRunbooksRDS1D73701E9Ready": { + "Fn::If": [ + "ControlRunbooksEnableRDS1ConditionFAE5B7EA", + { + "Ref": "ControlRunbooksRDS1D73701E9", + }, + "", + ], + }, + }, + "Type": "AWS::CloudFormation::WaitConditionHandle", + }, + "Gate6": { + "Metadata": { + "ControlRunbooksRDS2FBE04686Ready": { + "Fn::If": [ + "ControlRunbooksEnableRDS2Condition4FD00FE6", + { + "Ref": "ControlRunbooksRDS2FBE04686", + }, + "", + ], + }, + "ControlRunbooksRDS4C82F2410Ready": { + "Fn::If": [ + "ControlRunbooksEnableRDS4Condition2E89346E", + { + "Ref": "ControlRunbooksRDS4C82F2410", + }, + "", + ], + }, + "ControlRunbooksRDS5CECD9314Ready": { + "Fn::If": [ + "ControlRunbooksEnableRDS5ConditionEC2574C3", + { + "Ref": "ControlRunbooksRDS5CECD9314", + }, + "", + ], + }, + "ControlRunbooksRDS6082B0D6BReady": { + "Fn::If": [ + "ControlRunbooksEnableRDS6Condition4A60A39B", + { + "Ref": "ControlRunbooksRDS6082B0D6B", + }, + "", + ], + }, + "ControlRunbooksRDS715C0A01AReady": { + "Fn::If": [ + "ControlRunbooksEnableRDS7ConditionE53509B0", + { + "Ref": "ControlRunbooksRDS715C0A01A", + }, + "", + ], + }, + }, + "Type": "AWS::CloudFormation::WaitConditionHandle", + }, + "Gate7": { + "Metadata": { + "ControlRunbooksRDS13FCEA51BDReady": { + "Fn::If": [ + "ControlRunbooksEnableRDS13Condition0E8A44B3", + { + "Ref": 
"ControlRunbooksRDS13FCEA51BD", + }, + "", + ], + }, + "ControlRunbooksRDS16EB04DCBFReady": { + "Fn::If": [ + "ControlRunbooksEnableRDS16ConditionCB5C3E8F", + { + "Ref": "ControlRunbooksRDS16EB04DCBF", + }, + "", + ], + }, + "ControlRunbooksRDS89256480AReady": { + "Fn::If": [ + "ControlRunbooksEnableRDS8Condition8F460AB5", + { + "Ref": "ControlRunbooksRDS89256480A", + }, + "", + ], + }, + "ControlRunbooksRedshift1789871EBReady": { + "Fn::If": [ + "ControlRunbooksEnableRedshift1Condition3449D560", + { + "Ref": "ControlRunbooksRedshift1789871EB", + }, + "", + ], + }, + "ControlRunbooksRedshift3106C10FFReady": { + "Fn::If": [ + "ControlRunbooksEnableRedshift3ConditionC65BAEF6", + { + "Ref": "ControlRunbooksRedshift3106C10FF", + }, + "", + ], + }, + }, + "Type": "AWS::CloudFormation::WaitConditionHandle", + }, + "Gate8": { + "Metadata": { + "ControlRunbooksRedshift475A78168Ready": { + "Fn::If": [ + "ControlRunbooksEnableRedshift4Condition2377F6B5", + { + "Ref": "ControlRunbooksRedshift475A78168", + }, + "", + ], + }, + "ControlRunbooksRedshift658631424Ready": { + "Fn::If": [ + "ControlRunbooksEnableRedshift6Condition5A51FC97", + { + "Ref": "ControlRunbooksRedshift658631424", + }, + "", + ], + }, + "ControlRunbooksS311C5AAD45Ready": { + "Fn::If": [ + "ControlRunbooksEnableS31Condition25C33B3F", + { + "Ref": "ControlRunbooksS311C5AAD45", + }, + "", + ], + }, + "ControlRunbooksS3260D6E897Ready": { + "Fn::If": [ + "ControlRunbooksEnableS32ConditionD6F8CCE9", + { + "Ref": "ControlRunbooksS3260D6E897", + }, + "", + ], + }, + "ControlRunbooksS34F82DA9F1Ready": { + "Fn::If": [ + "ControlRunbooksEnableS34ConditionC23F6623", + { + "Ref": "ControlRunbooksS34F82DA9F1", + }, + "", + ], + }, + }, + "Type": "AWS::CloudFormation::WaitConditionHandle", + }, + "Gate9": { + "Metadata": { + "ControlRunbooksS31114BF0AC9Ready": { + "Fn::If": [ + "ControlRunbooksEnableS311Condition6AA79443", + { + "Ref": "ControlRunbooksS31114BF0AC9", + }, + "", + ], + }, + 
"ControlRunbooksS313756F060BReady": { + "Fn::If": [ + "ControlRunbooksEnableS313ConditionA95162A4", + { + "Ref": "ControlRunbooksS313756F060B", + }, + "", + ], + }, + "ControlRunbooksS356959B795Ready": { + "Fn::If": [ + "ControlRunbooksEnableS35ConditionD5E024B6", + { + "Ref": "ControlRunbooksS356959B795", + }, + "", + ], + }, + "ControlRunbooksS360762680AReady": { + "Fn::If": [ + "ControlRunbooksEnableS36ConditionD22273E2", + { + "Ref": "ControlRunbooksS360762680A", + }, + "", + ], + }, + "ControlRunbooksS394FBE32FAReady": { + "Fn::If": [ + "ControlRunbooksEnableS39Condition7705D6AD", + { + "Ref": "ControlRunbooksS394FBE32FA", + }, + "", + ], + }, + }, + "Type": "AWS::CloudFormation::WaitConditionHandle", + }, + }, +} +`; diff --git a/source/playbooks/NIST80053/test/nist_stack.test.ts b/source/playbooks/NIST80053/test/nist_stack.test.ts new file mode 100644 index 00000000..66d15577 --- /dev/null +++ b/source/playbooks/NIST80053/test/nist_stack.test.ts @@ -0,0 +1,50 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { App, DefaultStackSynthesizer, Stack } from 'aws-cdk-lib'; +import { Template } from 'aws-cdk-lib/assertions'; +import { PlaybookPrimaryStack } from '../../../lib/sharrplaybook-construct'; +import { NIST80053PlaybookMemberStack } from '../lib/NIST80053_playbook-construct'; + +function getPrimaryStack(): Stack { + const app = new App(); + const stack = new PlaybookPrimaryStack(app, 'primaryStack', { + synthesizer: new DefaultStackSynthesizer({ generateBootstrapVersionRule: false }), + description: 'test;', + solutionId: 'SO0111', + solutionVersion: 'v2.1.0', + solutionDistBucket: 'asrbukkit', + solutionDistName: 'automated-security-response-on-aws', + remediations: [{ control: 'Example.3' }, { control: 'Example.5' }, { control: 'Example.1' }], + securityStandard: 'NIST80053R5', + securityStandardLongName: 'nist-800-53', + securityStandardVersion: '5.0.0', + }); + return stack; +} + +test('Admin Stack - NIST', () => { + expect(Template.fromStack(getPrimaryStack())).toMatchSnapshot(); +}); + +function getMemberStack(): Stack { + const app = new App(); + const stack = new NIST80053PlaybookMemberStack(app, 'memberStack', { + synthesizer: new DefaultStackSynthesizer({ generateBootstrapVersionRule: false }), + description: 'test;', + solutionId: 'SO0111', + solutionVersion: 'v1.1.1', + solutionDistBucket: 'asrbukkit', + securityStandard: 'NIST80053R5', + securityStandardLongName: 'nist-800-53', + securityStandardVersion: '5.0.0', + ssmdocs: 'playbooks/NIST80053/ssmdocs', + commonScripts: 'playbooks/common', + remediations: [{ control: 'EC2.1' }, { control: 'RDS.1' }, { control: 'Lambda.1' }], + }); + + return stack; +} + +test('Member Stack - NIST', () => { + expect(Template.fromStack(getMemberStack())).toMatchSnapshot(); +}); diff --git a/source/playbooks/NIST80053/tsconfig.json b/source/playbooks/NIST80053/tsconfig.json new file mode 100644 index 00000000..4082f16a --- /dev/null +++ 
b/source/playbooks/NIST80053/tsconfig.json @@ -0,0 +1,3 @@ +{ + "extends": "../../tsconfig.json" +} diff --git a/source/playbooks/PCI321/bin/pci321.ts b/source/playbooks/PCI321/bin/pci321.ts index 7e4eb222..387381b9 100644 --- a/source/playbooks/PCI321/bin/pci321.ts +++ b/source/playbooks/PCI321/bin/pci321.ts @@ -31,6 +31,7 @@ const remediations: IControl[] = [ { control: 'PCI.CW.1' }, { control: 'PCI.EC2.1' }, { control: 'PCI.EC2.2' }, + { control: 'PCI.GuardDuty.1' }, { control: 'PCI.IAM.8' }, { control: 'PCI.KMS.1' }, { control: 'PCI.Lambda.1' }, diff --git a/source/playbooks/PCI321/ssmdocs/PCI_PCI.GuardDuty.1.yaml b/source/playbooks/PCI321/ssmdocs/PCI_PCI.GuardDuty.1.yaml new file mode 100644 index 00000000..10eb2806 --- /dev/null +++ b/source/playbooks/PCI321/ssmdocs/PCI_PCI.GuardDuty.1.yaml @@ -0,0 +1,93 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-PCI_3.2.1_GuardDuty.1 + + ## What does this document do? + This document enables Amazon GuardDuty. + + ## Input Parameters + * Finding: (Required) Security Hub finding details JSON + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. + + ## Output Parameters + * Remediation.Output + + ## Documentation Links + * [PCI GuardDuty.1](https://docs.aws.amazon.com/securityhub/latest/userguide/guardduty-controls.html#guardduty-1) +schemaVersion: '0.3' +assumeRole: '{{ AutomationAssumeRole }}' +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + Finding: + type: StringMap + description: The input from the Orchestrator Step function for the PCI.GuardDuty.1 finding + RemediationRoleName: + type: 'String' + default: 'SO0111-EnableGuardDuty' + allowedPattern: '^[\w+=,.@-]+$' + +outputs: + - ParseInput.AffectedObject + - Remediation.Output + +mainSteps: + - + name: ParseInput + action: 'aws:executeScript' + outputs: + - Name: FindingId + Selector: $.Payload.finding.Id + Type: String + - Name: ProductArn + Selector: $.Payload.finding.ProductArn + Type: String + - Name: AffectedObject + Selector: $.Payload.object + Type: StringMap + - Name: SubnetARN + Selector: $.Payload.resource_id + Type: String + - Name: RemediationRegion + Selector: $.Payload.resource_region + Type: String + - Name: RemediationAccount + Selector: $.Payload.account_id + Type: String + inputs: + InputPayload: + Finding: '{{Finding}}' + parse_id_pattern: '' + expected_control_id: + - 'PCI.GuardDuty.1' + Runtime: python3.8 + Handler: parse_event + Script: |- + %%SCRIPT=common/parse_input.py%% + - + name: Remediation + action: 'aws:executeAutomation' + inputs: + DocumentName: ASR-EnableGuardDuty + RuntimeParameters: + AutomationAssumeRole: 'arn:{{global:AWS_PARTITION}}:iam::{{global:ACCOUNT_ID}}:role/{{RemediationRoleName}}' + - + name: UpdateFinding + action: 'aws:executeAwsApi' + inputs: + Service: securityhub + Api: BatchUpdateFindings + FindingIdentifiers: + - Id: '{{ParseInput.FindingId}}' + ProductArn: '{{ParseInput.ProductArn}}' + Note: + Text: 'Enabled Amazon GuardDuty.' 
+ UpdatedBy: 'ASR-PCI_3.2.1_GuardDuty.1' + Workflow: + Status: RESOLVED + description: Update finding diff --git a/source/playbooks/PCI321/ssmdocs/PCI_PCI.IAM.8.yaml b/source/playbooks/PCI321/ssmdocs/PCI_PCI.IAM.8.yaml index 5000f9d9..38d97d78 100644 --- a/source/playbooks/PCI321/ssmdocs/PCI_PCI.IAM.8.yaml +++ b/source/playbooks/PCI321/ssmdocs/PCI_PCI.IAM.8.yaml @@ -9,7 +9,7 @@ description: | ## Security Standards and Controls * CIS 1.5 - 1.11 - * AFSBP IAM.7 + * FSBP IAM.7 * PCI IAM.8 ## Input Parameters diff --git a/source/playbooks/PCI321/ssmdocs/PCI_PCI.RDS.1.yaml b/source/playbooks/PCI321/ssmdocs/PCI_PCI.RDS.1.yaml index 4fc8a85b..5cbc3732 100644 --- a/source/playbooks/PCI321/ssmdocs/PCI_PCI.RDS.1.yaml +++ b/source/playbooks/PCI321/ssmdocs/PCI_PCI.RDS.1.yaml @@ -62,7 +62,7 @@ mainSteps: inputs: InputPayload: Finding: '{{Finding}}' - parse_id_pattern: '^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:(cluster-snapshot|snapshot):([a-zA-Z](?:[0-9a-zA-Z]+-)*[0-9a-zA-Z]+)$' + parse_id_pattern: '^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:(cluster-snapshot|snapshot):([a-zA-Z][0-9a-zA-Z]*(?:-[0-9a-zA-Z]+)*)$' resource_index: 2 expected_control_id: - 'PCI.RDS.1' diff --git a/source/playbooks/PCI321/ssmdocs/scripts/pci_get_input_values.py b/source/playbooks/PCI321/ssmdocs/scripts/pci_get_input_values.py index 81582cb5..769e3c3d 100644 --- a/source/playbooks/PCI321/ssmdocs/scripts/pci_get_input_values.py +++ b/source/playbooks/PCI321/ssmdocs/scripts/pci_get_input_values.py @@ -9,11 +9,10 @@ "metric_value": 1, "alarm_name": "SHARR_Alarm_PCI_321_Finding_CW1_RootAccountUsage", "alarm_desc": "Alarm for PCI finding CW.1 RootAccountUsage", - "alarm_threshold": 1 + "alarm_threshold": 1, } } def verify(event, _): - - return PCI_mappings.get(event['ControlId'], None) + return PCI_mappings.get(event["ControlId"], None) diff --git a/source/playbooks/PCI321/ssmdocs/scripts/test/test_pci_get_input_values.py 
b/source/playbooks/PCI321/ssmdocs/scripts/test/test_pci_get_input_values.py index 8b9c3ccd..719e3b66 100644 --- a/source/playbooks/PCI321/ssmdocs/scripts/test/test_pci_get_input_values.py +++ b/source/playbooks/PCI321/ssmdocs/scripts/test/test_pci_get_input_values.py @@ -1,9 +1,8 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import pytest - from pci_get_input_values import verify + def expected(): return { "filter_name": "SHARR_Filter_PCI_321_Finding_CW1_RootAccountUsage", @@ -12,8 +11,9 @@ def expected(): "metric_value": 1, "alarm_name": "SHARR_Alarm_PCI_321_Finding_CW1_RootAccountUsage", "alarm_desc": "Alarm for PCI finding CW.1 RootAccountUsage", - "alarm_threshold": 1 + "alarm_threshold": 1, } + def test_verify(): - assert verify({'ControlId': 'PCI.CW.1'}, {}) == expected() + assert verify({"ControlId": "PCI.CW.1"}, {}) == expected() diff --git a/source/playbooks/PCI321/test/__snapshots__/pci321_stack.test.ts.snap b/source/playbooks/PCI321/test/__snapshots__/pci321_stack.test.ts.snap index 4436403f..6fe88aae 100644 --- a/source/playbooks/PCI321/test/__snapshots__/pci321_stack.test.ts.snap +++ b/source/playbooks/PCI321/test/__snapshots__/pci321_stack.test.ts.snap @@ -453,106 +453,109 @@ Default: 30 seconds "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -562,101 +565,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, }", }, "isEnd": false, @@ -808,106 +852,109 @@ Enables VPC Flow Logs for a VPC "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -917,101 +964,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, }", }, "name": "ParseInput", @@ -1140,7 +1228,7 @@ This document establishes a default password policy. ## Security Standards and Controls * CIS 1.5 - 1.11 -* AFSBP IAM.7 +* FSBP IAM.7 * PCI IAM.8 ## Input Parameters @@ -1167,106 +1255,109 @@ This document establishes a default password policy. 
"Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) 
config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def 
_get_standard_info(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -1276,101 +1367,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: 
self.resource_region = check_for_region.group(1) - def __init__(self, finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if 
self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json 
Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, }", }, "isEnd": false, @@ -1466,7 +1598,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": 
"aa2cfc511d1d551f8f9af7857ff3116f745b335bbb687d28fa4001cc503aca03", + "DocumentPropertiesHash": "6f20765950aa8e2fcdd5b86db5ee56bfc35e50d088900de800c37e6b38e85f14", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -1483,7 +1615,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "aa2cfc511d1d551f8f9af7857ff3116f745b335bbb687d28fa4001cc503aca03", + "DocumentPropertiesHash": "6f20765950aa8e2fcdd5b86db5ee56bfc35e50d088900de800c37e6b38e85f14", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, diff --git a/source/playbooks/SC/bin/security_controls.ts b/source/playbooks/SC/bin/security_controls.ts index dfed34e6..deb237e7 100644 --- a/source/playbooks/SC/bin/security_controls.ts +++ b/source/playbooks/SC/bin/security_controls.ts @@ -29,6 +29,9 @@ Aspects.of(app).add(new AwsSolutionsChecks()); // Security Standard and Control Id. See cis-member-stack const remediations: IControl[] = [ { control: 'AutoScaling.1' }, + { control: 'CloudFormation.1' }, + { control: 'CloudFront.1' }, + { control: 'CloudFront.12' }, { control: 'CloudTrail.1' }, { control: 'CloudTrail.2' }, { control: 'CloudTrail.3', executes: 'CloudTrail.1' }, @@ -51,13 +54,20 @@ const remediations: IControl[] = [ { control: 'CloudWatch.13', executes: 'CloudWatch.1' }, { control: 'CloudWatch.14', executes: 'CloudWatch.1' }, { control: 'CodeBuild.2' }, + { control: 'CodeBuild.5' }, { control: 'Config.1' }, { control: 'EC2.1' }, { control: 'EC2.2' }, + { control: 'EC2.4' }, { control: 'EC2.6' }, { control: 'EC2.7' }, + { control: 'EC2.8' }, { control: 'EC2.13' }, { control: 'EC2.14', executes: 'EC2.13' }, + { control: 'EC2.15' }, + { control: 'EC2.18' }, + { control: 'EC2.19' }, + { control: 'EC2.23' }, { control: 'IAM.3' }, { control: 'IAM.7' }, { control: 'IAM.8' }, @@ -92,6 +102,16 @@ const remediations: IControl[] = [ { control: 'S3.5' }, { control: 'S3.6' }, { control: 'S3.8', executes: 'S3.2' }, + { control: 
'S3.9', executes: 'CloudTrail.7' }, + { control: 'S3.11' }, + { control: 'S3.13' }, + { control: 'SecretsManager.1' }, + { control: 'SecretsManager.3' }, + { control: 'SecretsManager.4' }, + { control: 'SNS.1' }, + { control: 'SNS.2' }, + { control: 'SQS.1' }, + { control: 'SSM.4' }, ]; const adminStack = new SecurityControlsPlaybookPrimaryStack(app, 'SCStack', { diff --git a/source/playbooks/SC/lib/control_runbooks-construct.ts b/source/playbooks/SC/lib/control_runbooks-construct.ts index afe39dc4..167dac5b 100644 --- a/source/playbooks/SC/lib/control_runbooks-construct.ts +++ b/source/playbooks/SC/lib/control_runbooks-construct.ts @@ -7,6 +7,8 @@ import { CfnCondition, CfnParameter, Fn } from 'aws-cdk-lib'; import * as autoscaling_1 from '../ssmdocs/SC_AutoScaling.1'; import * as cloudformation_1 from '../ssmdocs/SC_CloudFormation.1'; +import * as cloudfront_1 from '../ssmdocs/SC_CloudFront.1'; +import * as cloudfront_12 from '../ssmdocs/SC_CloudFront.12'; import * as cloudtrail_1 from '../ssmdocs/SC_CloudTrail.1'; import * as cloudtrail_2 from '../ssmdocs/SC_CloudTrail.2'; import * as cloudtrail_4 from '../ssmdocs/SC_CloudTrail.4'; @@ -15,13 +17,21 @@ import * as cloudtrail_6 from '../ssmdocs/SC_CloudTrail.6'; import * as cloudtrail_7 from '../ssmdocs/SC_CloudTrail.7'; import * as cloudwatch_1 from '../ssmdocs/SC_CloudWatch.1'; import * as codebuild_2 from '../ssmdocs/SC_CodeBuild.2'; +import * as codebuild_5 from '../ssmdocs/SC_CodeBuild.5'; import * as config_1 from '../ssmdocs/SC_Config.1'; import * as ec2_1 from '../ssmdocs/SC_EC2.1'; import * as ec2_2 from '../ssmdocs/SC_EC2.2'; +import * as ec2_4 from '../ssmdocs/SC_EC2.4'; import * as ec2_6 from '../ssmdocs/SC_EC2.6'; import * as ec2_7 from '../ssmdocs/SC_EC2.7'; +import * as ec2_8 from '../ssmdocs/SC_EC2.8'; import * as ec2_13 from '../ssmdocs/SC_EC2.13'; import * as ec2_15 from '../ssmdocs/SC_EC2.15'; +import * as ec2_18 from '../ssmdocs/SC_EC2.18'; +import * as ec2_19 from '../ssmdocs/SC_EC2.19'; 
+import * as ec2_23 from '../ssmdocs/SC_EC2.23'; +import * as ecr_1 from '../ssmdocs/SC_ECR.1'; +import * as guardduty_1 from '../ssmdocs/SC_GuardDuty.1'; import * as iam_3 from '../ssmdocs/SC_IAM.3'; import * as iam_7 from '../ssmdocs/SC_IAM.7'; import * as iam_8 from '../ssmdocs/SC_IAM.8'; @@ -47,9 +57,15 @@ import * as s3_2 from '../ssmdocs/SC_S3.2'; import * as s3_4 from '../ssmdocs/SC_S3.4'; import * as s3_5 from '../ssmdocs/SC_S3.5'; import * as s3_6 from '../ssmdocs/SC_S3.6'; +import * as s3_11 from '../ssmdocs/SC_S3.11'; +import * as s3_13 from '../ssmdocs/SC_S3.13'; +import * as secretsmanager_1 from '../ssmdocs/SC_SecretsManager.1'; +import * as secretsmanager_3 from '../ssmdocs/SC_SecretsManager.3'; +import * as secretsmanager_4 from '../ssmdocs/SC_SecretsManager.4'; import * as sqs_1 from '../ssmdocs/SC_SQS.1'; import * as sns_1 from '../ssmdocs/SC_SNS.1'; import * as sns_2 from '../ssmdocs/SC_SNS.2'; +import * as ssm_4 from '../ssmdocs/SC_SSM.4'; export interface PlaybookProps { standardShortName: string; @@ -74,6 +90,8 @@ export class ControlRunbooks extends Construct { this.add(autoscaling_1.createControlRunbook(this, 'AutoScaling.1', props)); this.add(cloudformation_1.createControlRunbook(this, 'CloudFormation.1', props)); + this.add(cloudfront_1.createControlRunbook(this, 'CloudFront.1', props)); + this.add(cloudfront_12.createControlRunbook(this, 'CloudFront.12', props)); this.add(cloudtrail_1.createControlRunbook(this, 'CloudTrail.1', props)); this.add(cloudtrail_2.createControlRunbook(this, 'CloudTrail.2', props)); this.add(cloudtrail_4.createControlRunbook(this, 'CloudTrail.4', props)); @@ -82,13 +100,21 @@ export class ControlRunbooks extends Construct { this.add(cloudtrail_7.createControlRunbook(this, 'CloudTrail.7', props)); this.add(cloudwatch_1.createControlRunbook(this, 'CloudWatch.1', props)); this.add(codebuild_2.createControlRunbook(this, 'CodeBuild.2', props)); + this.add(codebuild_5.createControlRunbook(this, 'CodeBuild.5', props)); 
this.add(config_1.createControlRunbook(this, 'Config.1', props)); this.add(ec2_1.createControlRunbook(this, 'EC2.1', props)); this.add(ec2_2.createControlRunbook(this, 'EC2.2', props)); + this.add(ec2_4.createControlRunbook(this, 'EC2.4', props)); this.add(ec2_6.createControlRunbook(this, 'EC2.6', props)); this.add(ec2_7.createControlRunbook(this, 'EC2.7', props)); + this.add(ec2_8.createControlRunbook(this, 'EC2.8', props)); this.add(ec2_13.createControlRunbook(this, 'EC2.13', props)); this.add(ec2_15.createControlRunbook(this, 'EC2.15', props)); + this.add(ec2_18.createControlRunbook(this, 'EC2.18', props)); + this.add(ec2_19.createControlRunbook(this, 'EC2.19', props)); + this.add(ec2_23.createControlRunbook(this, 'EC2.23', props)); + this.add(ecr_1.createControlRunbook(this, 'ECR.1', props)); + this.add(guardduty_1.createControlRunbook(this, 'GuardDuty.1', props)); this.add(iam_3.createControlRunbook(this, 'IAM.3', props)); this.add(iam_7.createControlRunbook(this, 'IAM.7', props)); this.add(iam_8.createControlRunbook(this, 'IAM.8', props)); @@ -114,9 +140,15 @@ export class ControlRunbooks extends Construct { this.add(s3_4.createControlRunbook(this, 'S3.4', props)); this.add(s3_5.createControlRunbook(this, 'S3.5', props)); this.add(s3_6.createControlRunbook(this, 'S3.6', props)); + this.add(s3_11.createControlRunbook(this, 'S3.11', props)); + this.add(s3_13.createControlRunbook(this, 'S3.13', props)); + this.add(secretsmanager_1.createControlRunbook(this, 'SecretsManager.1', props)); + this.add(secretsmanager_3.createControlRunbook(this, 'SecretsManager.3', props)); + this.add(secretsmanager_4.createControlRunbook(this, 'SecretsManager.4', props)); this.add(sqs_1.createControlRunbook(this, 'SQS.1', props)); this.add(sns_1.createControlRunbook(this, 'SNS.1', props)); this.add(sns_2.createControlRunbook(this, 'SNS.2', props)); + this.add(ssm_4.createControlRunbook(this, 'SSM.4', props)); } protected add(document: ControlRunbookDocument) { diff --git 
a/source/playbooks/SC/lib/security_controls_playbook-construct.ts b/source/playbooks/SC/lib/security_controls_playbook-construct.ts index d80d1f60..6e96419b 100644 --- a/source/playbooks/SC/lib/security_controls_playbook-construct.ts +++ b/source/playbooks/SC/lib/security_controls_playbook-construct.ts @@ -30,7 +30,7 @@ export class SecurityControlsPlaybookPrimaryStack extends Stack { const RESOURCE_PREFIX = props.solutionId.replace(/^DEV-/, ''); // prefix on every resource name const orchestratorArn = StringParameter.valueForStringParameter( this, - `/Solutions/${RESOURCE_PREFIX}/OrchestratorArn` + `/Solutions/${RESOURCE_PREFIX}/OrchestratorArn`, ); // Register the playbook. These parameters enable the step function to route matching events @@ -103,7 +103,7 @@ export class SecurityControlsPlaybookMemberStack extends Stack { const waitProvider = WaitProvider.fromServiceToken( this, 'WaitProvider', - waitProviderServiceTokenParam.valueAsString + waitProviderServiceTokenParam.valueAsString, ); Aspects.of(this).add(new SsmDocRateLimit(waitProvider)); diff --git a/source/playbooks/SC/ssmdocs/SC_AutoScaling.1.ts b/source/playbooks/SC/ssmdocs/SC_AutoScaling.1.ts index 28bb5518..95e94658 100644 --- a/source/playbooks/SC/ssmdocs/SC_AutoScaling.1.ts +++ b/source/playbooks/SC/ssmdocs/SC_AutoScaling.1.ts @@ -9,7 +9,7 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo return new EnableAutoScalingGroupELBHealthCheckDocument(scope, id, { ...props, controlId: 'AutoScaling.1' }); } -class EnableAutoScalingGroupELBHealthCheckDocument extends ControlRunbookDocument { +export class EnableAutoScalingGroupELBHealthCheckDocument extends ControlRunbookDocument { constructor(stage: Construct, id: string, props: ControlRunbookProps) { super(stage, id, { ...props, diff --git a/source/playbooks/SC/ssmdocs/SC_CloudFormation.1.ts b/source/playbooks/SC/ssmdocs/SC_CloudFormation.1.ts index adbb80ec..5334d24e 100644 --- 
a/source/playbooks/SC/ssmdocs/SC_CloudFormation.1.ts +++ b/source/playbooks/SC/ssmdocs/SC_CloudFormation.1.ts @@ -9,7 +9,7 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo return new ConfigureSNSTopicForStackDocument(scope, id, { ...props, controlId: 'CloudFormation.1' }); } -class ConfigureSNSTopicForStackDocument extends ControlRunbookDocument { +export class ConfigureSNSTopicForStackDocument extends ControlRunbookDocument { constructor(stage: Construct, id: string, props: ControlRunbookProps) { super(stage, id, { ...props, diff --git a/source/playbooks/SC/ssmdocs/SC_CloudFront.1.ts b/source/playbooks/SC/ssmdocs/SC_CloudFront.1.ts new file mode 100644 index 00000000..24050533 --- /dev/null +++ b/source/playbooks/SC/ssmdocs/SC_CloudFront.1.ts @@ -0,0 +1,24 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { ControlRunbookDocument, ControlRunbookProps, RemediationScope } from './control_runbook'; +import { PlaybookProps } from '../lib/control_runbooks-construct'; +import { HardCodedString } from '@cdklabs/cdk-ssm-documents'; + +export function createControlRunbook(scope: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableCloudFrontDefaultRootObjectDocument(scope, id, { ...props, controlId: 'CloudFront.1' }); +} + +export class EnableCloudFrontDefaultRootObjectDocument extends ControlRunbookDocument { + constructor(stage: Construct, id: string, props: ControlRunbookProps) { + super(stage, id, { + ...props, + securityControlId: 'CloudFront.1', + remediationName: 'EnableCloudFrontDefaultRootObject', + scope: RemediationScope.GLOBAL, + resourceIdName: 'CloudFrontDistribution', + resourceIdRegex: String.raw`^(arn:(?:aws|aws-us-gov|aws-cn):cloudfront::\d{12}:distribution\/([A-Z0-9]+))$`, + updateDescription: HardCodedString.of('Configured default root object for CloudFront 
distribution'), + }); + } +} diff --git a/source/playbooks/SC/ssmdocs/SC_CloudFront.12.ts b/source/playbooks/SC/ssmdocs/SC_CloudFront.12.ts new file mode 100644 index 00000000..f6b30ad8 --- /dev/null +++ b/source/playbooks/SC/ssmdocs/SC_CloudFront.12.ts @@ -0,0 +1,24 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { ControlRunbookDocument, ControlRunbookProps, RemediationScope } from './control_runbook'; +import { PlaybookProps } from '../lib/control_runbooks-construct'; +import { HardCodedString } from '@cdklabs/cdk-ssm-documents'; + +export function createControlRunbook(scope: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new SetCloudFrontOriginDomainDocument(scope, id, { ...props, controlId: 'CloudFront.12' }); +} + +export class SetCloudFrontOriginDomainDocument extends ControlRunbookDocument { + constructor(stage: Construct, id: string, props: ControlRunbookProps) { + super(stage, id, { + ...props, + securityControlId: 'CloudFront.12', + remediationName: 'SetCloudFrontOriginDomain', + scope: RemediationScope.GLOBAL, + resourceIdName: 'DistributionId', + resourceIdRegex: String.raw`^arn:(?:aws|aws-cn|aws-us-gov):cloudfront::[0-9]{12}:distribution\/([A-Z0-9]*)$`, + updateDescription: HardCodedString.of('Set CloudFront origin domain to safe value.'), + }); + } +} diff --git a/source/playbooks/SC/ssmdocs/SC_CloudTrail.2.ts b/source/playbooks/SC/ssmdocs/SC_CloudTrail.2.ts index ff2d772e..7b3924be 100644 --- a/source/playbooks/SC/ssmdocs/SC_CloudTrail.2.ts +++ b/source/playbooks/SC/ssmdocs/SC_CloudTrail.2.ts @@ -3,7 +3,7 @@ import { Construct } from 'constructs'; import { ControlRunbookDocument, ControlRunbookProps, RemediationScope } from './control_runbook'; import { PlaybookProps } from '../lib/control_runbooks-construct'; -import { DataTypeEnum, HardCodedString, Input, Output, StringVariable } from 
'@cdklabs/cdk-ssm-documents'; +import { HardCodedString, Input, Output, StringVariable } from '@cdklabs/cdk-ssm-documents'; export function createControlRunbook(scope: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { return new EnableCloudTrailEncryptionDocument(scope, id, { ...props, controlId: 'CloudTrail.2' }); @@ -23,7 +23,7 @@ export class EnableCloudTrailEncryptionDocument extends ControlRunbookDocument { docInputs, securityControlId: 'CloudTrail.2', remediationName: 'EnableCloudTrailEncryption', - scope: RemediationScope.GLOBAL, + scope: RemediationScope.REGIONAL, resourceIdName: 'TrailArn', updateDescription: HardCodedString.of('Encryption enabled on CloudTrail'), }); @@ -32,13 +32,6 @@ export class EnableCloudTrailEncryptionDocument extends ControlRunbookDocument { /** @override */ protected getParseInputStepOutputs(): Output[] { const outputs = super.getParseInputStepOutputs(); - - outputs.push({ - name: 'RemediationRegion', - outputType: DataTypeEnum.STRING, - selector: '$.Payload.resource_region', - }); - return outputs; } @@ -48,7 +41,6 @@ export class EnableCloudTrailEncryptionDocument extends ControlRunbookDocument { const params = super.getRemediationParams(); params.TrailRegion = StringVariable.of('ParseInput.RemediationRegion'); - params.KMSKeyArn = StringVariable.of('KMSKeyArn'); return params; } diff --git a/source/playbooks/SC/ssmdocs/SC_CloudTrail.7.ts b/source/playbooks/SC/ssmdocs/SC_CloudTrail.7.ts index 1e45035f..0981ea5e 100644 --- a/source/playbooks/SC/ssmdocs/SC_CloudTrail.7.ts +++ b/source/playbooks/SC/ssmdocs/SC_CloudTrail.7.ts @@ -14,7 +14,11 @@ import { } from '@cdklabs/cdk-ssm-documents'; export function createControlRunbook(scope: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { - return new ConfigureS3BucketLoggingDocument(scope, id, { ...props, controlId: 'CloudTrail.7' }); + return new ConfigureS3BucketLoggingDocument(scope, id, { + ...props, + controlId: 'CloudTrail.7', + 
otherControlIds: ['S3.9'], + }); } export class ConfigureS3BucketLoggingDocument extends ControlRunbookDocument { @@ -44,7 +48,7 @@ export class ConfigureS3BucketLoggingDocument extends ControlRunbookDocument { BucketName: getTargetBucketName(this.solutionId), AutomationAssumeRole: new StringFormat( `arn:%s:iam::%s:role/${this.solutionId}-${createAccessLoggingBucketStepName}`, - [StringVariable.of('global:AWS_PARTITION'), StringVariable.of('global:ACCOUNT_ID')] + [StringVariable.of('global:AWS_PARTITION'), StringVariable.of('global:ACCOUNT_ID')], ), }), }); diff --git a/source/playbooks/SC/ssmdocs/SC_CloudWatch.1.ts b/source/playbooks/SC/ssmdocs/SC_CloudWatch.1.ts index f4609628..19c4c6db 100644 --- a/source/playbooks/SC/ssmdocs/SC_CloudWatch.1.ts +++ b/source/playbooks/SC/ssmdocs/SC_CloudWatch.1.ts @@ -71,7 +71,7 @@ export class CreateLogMetricFilterAndAlarmDocument extends ControlRunbookDocumen remediationName: 'CreateLogMetricFilterAndAlarm', scope: RemediationScope.GLOBAL, updateDescription: HardCodedString.of( - `Added metric filter to the log group and notifications to SNS topic ${snsTopicName}.` + `Added metric filter to the log group and notifications to SNS topic ${snsTopicName}.`, ), }); this.standardLongName = props.standardLongName; @@ -96,7 +96,7 @@ export class CreateLogMetricFilterAndAlarmDocument extends ControlRunbookDocumen const getMetricFilterAndAlarmInputValueStep = new ExecuteScriptStep(this, 'GetMetricFilterAndAlarmInputValue', { language: ScriptLanguage.fromRuntime(this.runtimePython.name, 'verify'), code: ScriptCode.fromFile( - fs.realpathSync(path.join(__dirname, '..', '..', 'common', 'cloudwatch_get_input_values.py')) + fs.realpathSync(path.join(__dirname, '..', '..', 'common', 'cloudwatch_get_input_values.py')), ), inputPayload: { ControlId: StringVariable.of('ParseInput.ControlId'), diff --git a/source/playbooks/SC/ssmdocs/SC_CodeBuild.2.ts b/source/playbooks/SC/ssmdocs/SC_CodeBuild.2.ts index 601b1972..76f456bb 100644 --- 
a/source/playbooks/SC/ssmdocs/SC_CodeBuild.2.ts +++ b/source/playbooks/SC/ssmdocs/SC_CodeBuild.2.ts @@ -9,7 +9,7 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo return new ReplaceCodeBuildClearTextCredentialsDocument(scope, id, { ...props, controlId: 'CodeBuild.2' }); } -class ReplaceCodeBuildClearTextCredentialsDocument extends ControlRunbookDocument { +export class ReplaceCodeBuildClearTextCredentialsDocument extends ControlRunbookDocument { constructor(stage: Construct, id: string, props: ControlRunbookProps) { super(stage, id, { ...props, diff --git a/source/playbooks/SC/ssmdocs/SC_CodeBuild.5.ts b/source/playbooks/SC/ssmdocs/SC_CodeBuild.5.ts new file mode 100644 index 00000000..26966aa4 --- /dev/null +++ b/source/playbooks/SC/ssmdocs/SC_CodeBuild.5.ts @@ -0,0 +1,24 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { ControlRunbookDocument, ControlRunbookProps, RemediationScope } from './control_runbook'; +import { PlaybookProps } from '../lib/control_runbooks-construct'; +import { HardCodedString } from '@cdklabs/cdk-ssm-documents'; + +export function createControlRunbook(scope: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new RemoveCodeBuildPrivilegedModeDocument(scope, id, { ...props, controlId: 'CodeBuild.5' }); +} + +export class RemoveCodeBuildPrivilegedModeDocument extends ControlRunbookDocument { + constructor(stage: Construct, id: string, props: ControlRunbookProps) { + super(stage, id, { + ...props, + securityControlId: 'CodeBuild.5', + remediationName: 'RemoveCodeBuildPrivilegedMode', + scope: RemediationScope.REGIONAL, + resourceIdName: 'ProjectName', + resourceIdRegex: String.raw`^arn:(?:aws|aws-cn|aws-us-gov):codebuild:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:project\/([A-Za-z0-9][A-Za-z0-9\-_]{1,254})$`, + updateDescription: HardCodedString.of('Removed CodeBuild 
privileged status.'), + }); + } +} diff --git a/source/playbooks/SC/ssmdocs/SC_EC2.1.ts b/source/playbooks/SC/ssmdocs/SC_EC2.1.ts index b4f95754..c0960e6a 100644 --- a/source/playbooks/SC/ssmdocs/SC_EC2.1.ts +++ b/source/playbooks/SC/ssmdocs/SC_EC2.1.ts @@ -17,7 +17,7 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo return new MakeEBSSnapshotsPrivateDocument(scope, id, { ...props, controlId: 'EC2.1' }); } -class MakeEBSSnapshotsPrivateDocument extends ControlRunbookDocument { +export class MakeEBSSnapshotsPrivateDocument extends ControlRunbookDocument { constructor(scope: Construct, id: string, props: ControlRunbookProps) { super(scope, id, { ...props, diff --git a/source/playbooks/SC/ssmdocs/SC_EC2.13.ts b/source/playbooks/SC/ssmdocs/SC_EC2.13.ts index 0aa00469..224e334e 100644 --- a/source/playbooks/SC/ssmdocs/SC_EC2.13.ts +++ b/source/playbooks/SC/ssmdocs/SC_EC2.13.ts @@ -20,7 +20,7 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo }); } -class DisablePublicAccessForSecurityGroupDocument extends ControlRunbookDocument { +export class DisablePublicAccessForSecurityGroupDocument extends ControlRunbookDocument { constructor(scope: Construct, id: string, props: ControlRunbookProps) { const resourceIdName = 'GroupId'; diff --git a/source/playbooks/SC/ssmdocs/SC_EC2.18.ts b/source/playbooks/SC/ssmdocs/SC_EC2.18.ts new file mode 100644 index 00000000..c3b1a0c7 --- /dev/null +++ b/source/playbooks/SC/ssmdocs/SC_EC2.18.ts @@ -0,0 +1,64 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../lib/control_runbooks-construct'; +import { ControlRunbookDocument, ControlRunbookProps, RemediationScope } from './control_runbook'; +import { HardCodedString, Output, DataTypeEnum, StringListVariable, AutomationStep } from '@cdklabs/cdk-ssm-documents'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new RevokeUnauthorizedInboundRulesDocument(stage, id, { ...props, controlId: 'EC2.18' }); +} + +export class RevokeUnauthorizedInboundRulesDocument extends ControlRunbookDocument { + constructor(stage: Construct, id: string, props: ControlRunbookProps) { + super(stage, id, { + ...props, + securityControlId: 'EC2.18', + remediationName: 'RevokeUnauthorizedInboundRules', + scope: RemediationScope.GLOBAL, + resourceIdName: 'SecurityGroupId', + resourceIdRegex: String.raw`^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:security-group/(sg-[0-9a-f]*)$`, + updateDescription: HardCodedString.of('Revoked unrestricted inbound security group rules on unauthorized ports.'), + }); + } + + /** @override */ + protected getExtraSteps(): AutomationStep[] { + return [ + super.getInputParamsStep({ + authorizedTcpPorts: ['80', '443'], + authorizedUdpPorts: [], + }), + ]; + } + + /** @override */ + protected getInputParamsStepOutput(): Output[] { + const AuthorizedTcpPorts: Output = { + name: 'authorizedTcpPorts', + outputType: DataTypeEnum.STRING_LIST, + selector: '$.Payload.authorizedTcpPorts', + }; + const AuthorizedUdpPorts: Output = { + name: 'authorizedUdpPorts', + outputType: DataTypeEnum.STRING_LIST, + selector: '$.Payload.authorizedUdpPorts', + }; + + const outputs: Output[] = [AuthorizedTcpPorts, AuthorizedUdpPorts]; + + return outputs; + } + + /** @override */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + protected getRemediationParams(): { 
[_: string]: any } { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const params: { [_: string]: any } = super.getRemediationParams(); + + params.AuthorizedTcpPorts = StringListVariable.of('GetInputParams.authorizedTcpPorts'); + params.AuthorizedUdpPorts = StringListVariable.of('GetInputParams.authorizedUdpPorts'); + + return params; + } +} diff --git a/source/playbooks/SC/ssmdocs/SC_EC2.19.ts b/source/playbooks/SC/ssmdocs/SC_EC2.19.ts new file mode 100644 index 00000000..26eb1706 --- /dev/null +++ b/source/playbooks/SC/ssmdocs/SC_EC2.19.ts @@ -0,0 +1,24 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../lib/control_runbooks-construct'; +import { ControlRunbookDocument, ControlRunbookProps, RemediationScope } from '../../SC/ssmdocs/control_runbook'; +import { HardCodedString } from '@cdklabs/cdk-ssm-documents'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new DisableUnrestrictedAccessToHighRiskPortsDocument(stage, id, { ...props, controlId: 'EC2.19' }); +} + +export class DisableUnrestrictedAccessToHighRiskPortsDocument extends ControlRunbookDocument { + constructor(stage: Construct, id: string, props: ControlRunbookProps) { + super(stage, id, { + ...props, + securityControlId: 'EC2.19', + remediationName: 'DisableUnrestrictedAccessToHighRiskPorts', + scope: RemediationScope.GLOBAL, + resourceIdName: 'SecurityGroupId', + resourceIdRegex: String.raw`^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:security-group/(sg-[0-9a-f]*)$`, + updateDescription: HardCodedString.of('Revoking access to high risk ports.'), + }); + } +} diff --git a/source/playbooks/SC/ssmdocs/SC_EC2.23.ts b/source/playbooks/SC/ssmdocs/SC_EC2.23.ts new file mode 100644 index 00000000..6bbf0eae --- /dev/null +++ 
b/source/playbooks/SC/ssmdocs/SC_EC2.23.ts @@ -0,0 +1,26 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../lib/control_runbooks-construct'; +import { ControlRunbookDocument, ControlRunbookProps, RemediationScope } from './control_runbook'; +import { HardCodedString } from '@cdklabs/cdk-ssm-documents'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new DisableTGWAutoAcceptSharedAttachmentsDocument(stage, id, { ...props, controlId: 'EC2.23' }); +} + +export class DisableTGWAutoAcceptSharedAttachmentsDocument extends ControlRunbookDocument { + constructor(stage: Construct, id: string, props: ControlRunbookProps) { + super(stage, id, { + ...props, + securityControlId: 'EC2.23', + remediationName: 'DisableTGWAutoAcceptSharedAttachments', + scope: RemediationScope.GLOBAL, + resourceIdName: 'TransitGatewayId', + resourceIdRegex: String.raw`^arn:(?:aws|aws-cn|aws-us-gov):ec2:[a-z]{2}-[a-z]+-\d{1}:\d{12}:transit-gateway\/(tgw-[a-z0-9\-]+)$`, + updateDescription: HardCodedString.of( + 'Disabling Transit Gateway from automatically accepting VPC attachment requests.', + ), + }); + } +} diff --git a/source/playbooks/SC/ssmdocs/SC_EC2.4.ts b/source/playbooks/SC/ssmdocs/SC_EC2.4.ts new file mode 100644 index 00000000..9e27d5b8 --- /dev/null +++ b/source/playbooks/SC/ssmdocs/SC_EC2.4.ts @@ -0,0 +1,60 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../lib/control_runbooks-construct'; +import { ControlRunbookDocument, ControlRunbookProps, RemediationScope } from './control_runbook'; +import { + HardCodedString, + DocumentOutput, + DataTypeEnum, + StringVariable, + StringFormat, + StringListVariable, +} from '@cdklabs/cdk-ssm-documents'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new TerminateEC2InstanceDocument(stage, id, { ...props, controlId: 'EC2.4' }); +} + +export class TerminateEC2InstanceDocument extends ControlRunbookDocument { + constructor(stage: Construct, id: string, props: ControlRunbookProps) { + super(stage, id, { + ...props, + securityControlId: 'EC2.4', + remediationName: 'TerminateEC2Instance', + scope: RemediationScope.GLOBAL, + resourceIdName: 'InstanceId', + resourceIdRegex: String.raw`^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:instance\/(i-[0-9a-f]*)$`, + updateDescription: HardCodedString.of('Terminated EC2 instance.'), + }); + } + + solutionAcronym = 'AWS'; + + docOutputs = this.getOutputs(); + + /** @override */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + protected getRemediationParams(): { [_: string]: any } { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const params: { [_: string]: any } = { + AutomationAssumeRole: new StringFormat(`arn:%s:iam::%s:role/%s`, [ + StringVariable.of('global:AWS_PARTITION'), + StringVariable.of('global:ACCOUNT_ID'), + StringVariable.of('RemediationRoleName'), + ]), + }; + + // Pass the resource ID only if used + if (this.resourceIdName) { + params[this.resourceIdName] = [StringListVariable.of(`ParseInput.${this.resourceIdName}`)]; + } + + return params; + } + + /** @override */ + protected getOutputs(): DocumentOutput[] { + return [{ name: 'ParseInput.AffectedObject', outputType: 
DataTypeEnum.STRING_MAP }]; + } +} diff --git a/source/playbooks/SC/ssmdocs/SC_EC2.6.ts b/source/playbooks/SC/ssmdocs/SC_EC2.6.ts index a3280fd9..02ba345a 100644 --- a/source/playbooks/SC/ssmdocs/SC_EC2.6.ts +++ b/source/playbooks/SC/ssmdocs/SC_EC2.6.ts @@ -18,7 +18,7 @@ export class EnableVPCFlowLogsDocument extends ControlRunbookDocument { scope: RemediationScope.REGIONAL, resourceIdName: 'VPC', resourceIdRegex: String.raw`^arn:(?:aws|aws-cn|aws-us-gov):ec2:.*:\d{12}:vpc\/(vpc-[0-9a-f]{8,17})$`, - updateDescription: HardCodedString.of('Removed rules on default security group'), + updateDescription: HardCodedString.of('Enabled VPC Flow logging.'), }); } @@ -30,7 +30,7 @@ export class EnableVPCFlowLogsDocument extends ControlRunbookDocument { params.RemediationRole = new StringFormat( `arn:%s:iam::%s:role/${this.solutionId}-EnableVPCFlowLogs-remediationRole`, - [StringVariable.of('global:AWS_PARTITION'), StringVariable.of('global:ACCOUNT_ID')] + [StringVariable.of('global:AWS_PARTITION'), StringVariable.of('global:ACCOUNT_ID')], ); return params; diff --git a/source/playbooks/SC/ssmdocs/SC_EC2.8.ts b/source/playbooks/SC/ssmdocs/SC_EC2.8.ts new file mode 100644 index 00000000..528b7b98 --- /dev/null +++ b/source/playbooks/SC/ssmdocs/SC_EC2.8.ts @@ -0,0 +1,23 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { ControlRunbookDocument, ControlRunbookProps, RemediationScope } from './control_runbook'; +import { PlaybookProps } from '../lib/control_runbooks-construct'; +import { HardCodedString } from '@cdklabs/cdk-ssm-documents'; + +export function createControlRunbook(scope: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableIMDSV2OnInstance(scope, id, { ...props, controlId: 'EC2.8' }); +} + +export class EnableIMDSV2OnInstance extends ControlRunbookDocument { + constructor(scope: Construct, id: string, props: ControlRunbookProps) { + super(scope, id, { + ...props, + securityControlId: 'EC2.8', + remediationName: 'EnableIMDSV2OnInstance', + scope: RemediationScope.GLOBAL, + resourceIdName: 'InstanceARN', + updateDescription: HardCodedString.of('Enabled IMDSv2 on Instance'), + }); + } +} diff --git a/source/playbooks/SC/ssmdocs/SC_ECR.1.ts b/source/playbooks/SC/ssmdocs/SC_ECR.1.ts new file mode 100644 index 00000000..c01250de --- /dev/null +++ b/source/playbooks/SC/ssmdocs/SC_ECR.1.ts @@ -0,0 +1,24 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../lib/control_runbooks-construct'; +import { ControlRunbookDocument, ControlRunbookProps, RemediationScope } from './control_runbook'; +import { HardCodedString } from '@cdklabs/cdk-ssm-documents'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnablePrivateRepositoryScanningDocument(stage, id, { ...props, controlId: 'ECR.1' }); +} + +export class EnablePrivateRepositoryScanningDocument extends ControlRunbookDocument { + constructor(stage: Construct, id: string, props: ControlRunbookProps) { + super(stage, id, { + ...props, + securityControlId: 'ECR.1', + remediationName: 'EnablePrivateRepositoryScanning', + scope: RemediationScope.GLOBAL, + resourceIdName: 'RepositoryName', + resourceIdRegex: String.raw`^arn:(?:aws|aws-cn|aws-us-gov):ecr:[a-z]{2}-[a-z]+-\d{1}:\d{12}:repository\/([a-z0-9._\/\-]+)$`, + updateDescription: HardCodedString.of('Enabling image scanning for private ECR repository.'), + }); + } +} diff --git a/source/playbooks/SC/ssmdocs/SC_GuardDuty.1.ts b/source/playbooks/SC/ssmdocs/SC_GuardDuty.1.ts new file mode 100644 index 00000000..1e273fd3 --- /dev/null +++ b/source/playbooks/SC/ssmdocs/SC_GuardDuty.1.ts @@ -0,0 +1,22 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../lib/control_runbooks-construct'; +import { ControlRunbookDocument, ControlRunbookProps, RemediationScope } from './control_runbook'; +import { HardCodedString } from '@cdklabs/cdk-ssm-documents'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableGuardDutyDocument(stage, id, { ...props, controlId: 'GuardDuty.1' }); +} + +export class EnableGuardDutyDocument extends ControlRunbookDocument { + constructor(stage: Construct, id: string, props: ControlRunbookProps) { + super(stage, id, { + ...props, + securityControlId: 'GuardDuty.1', + remediationName: 'EnableGuardDuty', + scope: RemediationScope.GLOBAL, + updateDescription: HardCodedString.of('Amazon GuardDuty enabled.'), + }); + } +} diff --git a/source/playbooks/SC/ssmdocs/SC_IAM.18.ts b/source/playbooks/SC/ssmdocs/SC_IAM.18.ts index 5a741bd3..911490d5 100644 --- a/source/playbooks/SC/ssmdocs/SC_IAM.18.ts +++ b/source/playbooks/SC/ssmdocs/SC_IAM.18.ts @@ -19,7 +19,7 @@ export class CreateIAMSupportRoleDocument extends ControlRunbookDocument { remediationName, scope: RemediationScope.GLOBAL, updateDescription: HardCodedString.of( - `Create an IAM role to allow authorized users to manage incidents with AWS Support using the ${props.solutionAcronym}-${remediationName} runbook.` + `Create an IAM role to allow authorized users to manage incidents with AWS Support using the ${props.solutionAcronym}-${remediationName} runbook.`, ), }); } diff --git a/source/playbooks/SC/ssmdocs/SC_IAM.7.ts b/source/playbooks/SC/ssmdocs/SC_IAM.7.ts index 9b010409..ba555572 100644 --- a/source/playbooks/SC/ssmdocs/SC_IAM.7.ts +++ b/source/playbooks/SC/ssmdocs/SC_IAM.7.ts @@ -3,7 +3,14 @@ import { Construct } from 'constructs'; import { ControlRunbookDocument, ControlRunbookProps, RemediationScope } from './control_runbook'; import { 
PlaybookProps } from '../lib/control_runbooks-construct'; -import { HardCodedBoolean, HardCodedNumber, HardCodedString } from '@cdklabs/cdk-ssm-documents'; +import { + AutomationStep, + DataTypeEnum, + HardCodedString, + BooleanVariable, + NumberVariable, + Output, +} from '@cdklabs/cdk-ssm-documents'; export function createControlRunbook(scope: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { return new SetIAMPasswordPolicyDocument(scope, id, { @@ -23,10 +30,89 @@ export class SetIAMPasswordPolicyDocument extends ControlRunbookDocument { remediationName, scope: RemediationScope.GLOBAL, updateDescription: HardCodedString.of( - `Established a baseline password policy using the ${props.solutionAcronym}-${remediationName} runbook.` + `Established a baseline password policy using the ${props.solutionAcronym}-${remediationName} runbook.`, ), }); } + /** @override */ + protected getExtraSteps(): AutomationStep[] { + return [ + super.getInputParamsStep({ + AllowUsersToChangePassword: 'True', + HardExpiry: 'True', + MaxPasswordAge: '90', + MinimumPasswordLength: '14', + RequireSymbols: 'True', + RequireNumbers: 'True', + RequireUppercaseCharacters: 'True', + RequireLowercaseCharacters: 'True', + PasswordReusePrevention: '24', + }), + ]; + } + + /** @override */ + protected getInputParamsStepOutput(): Output[] { + const AllowUsersToChangePasswordOutput: Output = { + name: 'AllowUsersToChangePassword', + outputType: DataTypeEnum.BOOLEAN, + selector: '$.Payload.AllowUsersToChangePassword', + }; + const HardExpiryOutput: Output = { + name: 'HardExpiry', + outputType: DataTypeEnum.BOOLEAN, + selector: '$.Payload.HardExpiry', + }; + const MaxPasswordAgeOutput: Output = { + name: 'MaxPasswordAge', + outputType: DataTypeEnum.INTEGER, + selector: '$.Payload.MaxPasswordAge', + }; + const MinimumPasswordLengthOutput: Output = { + name: 'MinimumPasswordLength', + outputType: DataTypeEnum.INTEGER, + selector: '$.Payload.MinimumPasswordLength', + }; + const 
RequireSymbolsOutput: Output = { + name: 'RequireSymbols', + outputType: DataTypeEnum.BOOLEAN, + selector: '$.Payload.RequireSymbols', + }; + const RequireNumbersOutput: Output = { + name: 'RequireNumbers', + outputType: DataTypeEnum.BOOLEAN, + selector: '$.Payload.RequireNumbers', + }; + const RequireUppercaseCharactersOutput: Output = { + name: 'RequireUppercaseCharacters', + outputType: DataTypeEnum.BOOLEAN, + selector: '$.Payload.RequireUppercaseCharacters', + }; + const RequireLowercaseCharactersOutput: Output = { + name: 'RequireLowercaseCharacters', + outputType: DataTypeEnum.BOOLEAN, + selector: '$.Payload.RequireLowercaseCharacters', + }; + const PasswordReusePreventionOutput: Output = { + name: 'PasswordReusePrevention', + outputType: DataTypeEnum.INTEGER, + selector: '$.Payload.PasswordReusePrevention', + }; + + const outputs: Output[] = [ + AllowUsersToChangePasswordOutput, + HardExpiryOutput, + MaxPasswordAgeOutput, + MinimumPasswordLengthOutput, + RequireSymbolsOutput, + RequireNumbersOutput, + RequireUppercaseCharactersOutput, + RequireLowercaseCharactersOutput, + PasswordReusePreventionOutput, + ]; + + return outputs; + } /** @override */ // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -34,15 +120,15 @@ export class SetIAMPasswordPolicyDocument extends ControlRunbookDocument { // eslint-disable-next-line @typescript-eslint/no-explicit-any const params: { [_: string]: any } = super.getRemediationParams(); - params.AllowUsersToChangePassword = HardCodedBoolean.TRUE; - params.HardExpiry = HardCodedBoolean.TRUE; - params.MaxPasswordAge = HardCodedNumber.of(90); - params.MinimumPasswordLength = HardCodedNumber.of(14); - params.RequireSymbols = HardCodedBoolean.TRUE; - params.RequireNumbers = HardCodedBoolean.TRUE; - params.RequireUppercaseCharacters = HardCodedBoolean.TRUE; - params.RequireLowercaseCharacters = HardCodedBoolean.TRUE; - params.PasswordReusePrevention = HardCodedNumber.of(24); + params.AllowUsersToChangePassword = 
BooleanVariable.of('GetInputParams.AllowUsersToChangePassword'); + params.HardExpiry = BooleanVariable.of('GetInputParams.HardExpiry'); + params.MaxPasswordAge = NumberVariable.of('GetInputParams.MaxPasswordAge'); + params.MinimumPasswordLength = NumberVariable.of('GetInputParams.MinimumPasswordLength'); + params.RequireSymbols = BooleanVariable.of('GetInputParams.RequireSymbols'); + params.RequireNumbers = BooleanVariable.of('GetInputParams.RequireNumbers'); + params.RequireUppercaseCharacters = BooleanVariable.of('GetInputParams.RequireUppercaseCharacters'); + params.RequireLowercaseCharacters = BooleanVariable.of('GetInputParams.RequireLowercaseCharacters'); + params.PasswordReusePrevention = NumberVariable.of('GetInputParams.PasswordReusePrevention'); return params; } diff --git a/source/playbooks/SC/ssmdocs/SC_IAM.8.ts b/source/playbooks/SC/ssmdocs/SC_IAM.8.ts index 67242911..d16ce946 100644 --- a/source/playbooks/SC/ssmdocs/SC_IAM.8.ts +++ b/source/playbooks/SC/ssmdocs/SC_IAM.8.ts @@ -20,7 +20,7 @@ export class RevokeUnusedIAMUserCredentialsDocument extends ControlRunbookDocume remediationName, scope: RemediationScope.GLOBAL, updateDescription: HardCodedString.of( - `Deactivated unused keys and expired logins using the ${props.solutionAcronym}-${remediationName} runbook.` + `Deactivated unused keys and expired logins using the ${props.solutionAcronym}-${remediationName} runbook.`, ), }); this.maxCredentialUsageAge = props.parameterToPass ?? 
'90'; diff --git a/source/playbooks/SC/ssmdocs/SC_Lambda.1.ts b/source/playbooks/SC/ssmdocs/SC_Lambda.1.ts index fa9988da..82b8cc4c 100644 --- a/source/playbooks/SC/ssmdocs/SC_Lambda.1.ts +++ b/source/playbooks/SC/ssmdocs/SC_Lambda.1.ts @@ -9,7 +9,7 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo return new RemoveLambdaPublicAccessDocument(scope, id, { ...props, controlId: 'Lambda.1' }); } -class RemoveLambdaPublicAccessDocument extends ControlRunbookDocument { +export class RemoveLambdaPublicAccessDocument extends ControlRunbookDocument { constructor(scope: Construct, id: string, props: ControlRunbookProps) { const resourceIdName = 'FunctionName'; diff --git a/source/playbooks/SC/ssmdocs/SC_RDS.1.ts b/source/playbooks/SC/ssmdocs/SC_RDS.1.ts index 5450d89c..2f6ce03b 100644 --- a/source/playbooks/SC/ssmdocs/SC_RDS.1.ts +++ b/source/playbooks/SC/ssmdocs/SC_RDS.1.ts @@ -16,7 +16,7 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo return new MakeRDSSnapshotPrivateDocument(scope, id, { ...props, controlId: 'RDS.1' }); } -class MakeRDSSnapshotPrivateDocument extends ControlRunbookDocument { +export class MakeRDSSnapshotPrivateDocument extends ControlRunbookDocument { constructor(scope: Construct, id: string, props: ControlRunbookProps) { super(scope, id, { ...props, @@ -24,7 +24,7 @@ class MakeRDSSnapshotPrivateDocument extends ControlRunbookDocument { remediationName: 'MakeRDSSnapshotPrivate', scope: RemediationScope.REGIONAL, resourceIdName: 'DBSnapshotId', - resourceIdRegex: String.raw`^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:(cluster-snapshot|snapshot):([a-zA-Z](?:[0-9a-zA-Z]+-)*[0-9a-zA-Z]+)$`, + resourceIdRegex: String.raw`^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:(cluster-snapshot|snapshot):([a-zA-Z][0-9a-zA-Z]*(?:-[0-9a-zA-Z]+)*)$`, updateDescription: HardCodedString.of('RDS DB Snapshot modified to private'), }); } diff --git 
a/source/playbooks/SC/ssmdocs/SC_RDS.13.ts b/source/playbooks/SC/ssmdocs/SC_RDS.13.ts index bf1a19ae..e9d21f49 100644 --- a/source/playbooks/SC/ssmdocs/SC_RDS.13.ts +++ b/source/playbooks/SC/ssmdocs/SC_RDS.13.ts @@ -9,7 +9,7 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo return new EnableMinorVersionUpgradeOnRDSDBInstanceDocument(scope, id, { ...props, controlId: 'RDS.13' }); } -class EnableMinorVersionUpgradeOnRDSDBInstanceDocument extends ControlRunbookDocument { +export class EnableMinorVersionUpgradeOnRDSDBInstanceDocument extends ControlRunbookDocument { constructor(scope: Construct, id: string, props: ControlRunbookProps) { super(scope, id, { ...props, diff --git a/source/playbooks/SC/ssmdocs/SC_RDS.16.ts b/source/playbooks/SC/ssmdocs/SC_RDS.16.ts index 92bd04f7..127067bc 100644 --- a/source/playbooks/SC/ssmdocs/SC_RDS.16.ts +++ b/source/playbooks/SC/ssmdocs/SC_RDS.16.ts @@ -9,7 +9,7 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo return new EnableCopyTagsToSnapshotOnRDSClusterDocument(scope, id, { ...props, controlId: 'RDS.16' }); } -class EnableCopyTagsToSnapshotOnRDSClusterDocument extends ControlRunbookDocument { +export class EnableCopyTagsToSnapshotOnRDSClusterDocument extends ControlRunbookDocument { constructor(scope: Construct, id: string, props: ControlRunbookProps) { super(scope, id, { ...props, diff --git a/source/playbooks/SC/ssmdocs/SC_RDS.2.ts b/source/playbooks/SC/ssmdocs/SC_RDS.2.ts index 2ed39bfa..c4f78902 100644 --- a/source/playbooks/SC/ssmdocs/SC_RDS.2.ts +++ b/source/playbooks/SC/ssmdocs/SC_RDS.2.ts @@ -9,7 +9,7 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo return new DisablePublicAccessToRDSInstanceDocument(scope, id, { ...props, controlId: 'RDS.2' }); } -class DisablePublicAccessToRDSInstanceDocument extends ControlRunbookDocument { +export class DisablePublicAccessToRDSInstanceDocument extends ControlRunbookDocument { 
constructor(scope: Construct, id: string, props: ControlRunbookProps) { super(scope, id, { ...props, diff --git a/source/playbooks/SC/ssmdocs/SC_RDS.4.ts b/source/playbooks/SC/ssmdocs/SC_RDS.4.ts index 88897007..3e59cacf 100644 --- a/source/playbooks/SC/ssmdocs/SC_RDS.4.ts +++ b/source/playbooks/SC/ssmdocs/SC_RDS.4.ts @@ -18,7 +18,7 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo return new EncryptRDSSnapshotDocument(scope, id, { ...props, controlId: 'RDS.4' }); } -class EncryptRDSSnapshotDocument extends ControlRunbookDocument { +export class EncryptRDSSnapshotDocument extends ControlRunbookDocument { constructor(scope: Construct, id: string, props: ControlRunbookProps) { const docInputs: Input[] = [ Input.ofTypeString('KMSKeyId', { @@ -35,7 +35,7 @@ class EncryptRDSSnapshotDocument extends ControlRunbookDocument { securityControlId: 'RDS.4', remediationName: 'EncryptRDSSnapshot', scope: RemediationScope.REGIONAL, - resourceIdRegex: String.raw`^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:((?:cluster-)?snapshot|dbclustersnapshot):((?:rds:)?((?!.*--.*)(?!.*-$)[a-zA-Z][a-zA-Z0-9-]{0,254}))$`, + resourceIdRegex: String.raw`^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:((?:cluster-)?snapshot|dbclustersnapshot):((?:rds:|awsbackup:)?((?!.*--.*)(?!.*-$)[a-zA-Z][a-zA-Z0-9-]{0,254}))$`, updateDescription: HardCodedString.of('Encrypted RDS snapshot'), }); } @@ -68,7 +68,7 @@ class EncryptRDSSnapshotDocument extends ControlRunbookDocument { name: 'DBSnapshotType', outputType: DataTypeEnum.STRING, selector: '$.Payload.matches[0]', - } + }, ); return outputs; diff --git a/source/playbooks/SC/ssmdocs/SC_RDS.5.ts b/source/playbooks/SC/ssmdocs/SC_RDS.5.ts index 26eb7710..6ae18eb7 100644 --- a/source/playbooks/SC/ssmdocs/SC_RDS.5.ts +++ b/source/playbooks/SC/ssmdocs/SC_RDS.5.ts @@ -9,7 +9,7 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo return new 
EnableMultiAZOnRDSInstanceDocument(scope, id, { ...props, controlId: 'RDS.5' }); } -class EnableMultiAZOnRDSInstanceDocument extends ControlRunbookDocument { +export class EnableMultiAZOnRDSInstanceDocument extends ControlRunbookDocument { constructor(scope: Construct, id: string, props: ControlRunbookProps) { super(scope, id, { ...props, diff --git a/source/playbooks/SC/ssmdocs/SC_RDS.6.ts b/source/playbooks/SC/ssmdocs/SC_RDS.6.ts index 05b18ce8..272f3959 100644 --- a/source/playbooks/SC/ssmdocs/SC_RDS.6.ts +++ b/source/playbooks/SC/ssmdocs/SC_RDS.6.ts @@ -17,7 +17,7 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo return new EnableEnhancedMonitoringOnRDSInstanceDocument(scope, id, { ...props, controlId: 'RDS.6' }); } -class EnableEnhancedMonitoringOnRDSInstanceDocument extends ControlRunbookDocument { +export class EnableEnhancedMonitoringOnRDSInstanceDocument extends ControlRunbookDocument { constructor(scope: Construct, id: string, props: ControlRunbookProps) { super(scope, id, { ...props, diff --git a/source/playbooks/SC/ssmdocs/SC_RDS.7.ts b/source/playbooks/SC/ssmdocs/SC_RDS.7.ts index 58638af2..0a7786ae 100644 --- a/source/playbooks/SC/ssmdocs/SC_RDS.7.ts +++ b/source/playbooks/SC/ssmdocs/SC_RDS.7.ts @@ -9,7 +9,7 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo return new EnableRDSClusterDeletionProtectionDocument(scope, id, { ...props, controlId: 'RDS.7' }); } -class EnableRDSClusterDeletionProtectionDocument extends ControlRunbookDocument { +export class EnableRDSClusterDeletionProtectionDocument extends ControlRunbookDocument { constructor(scope: Construct, id: string, props: ControlRunbookProps) { super(scope, id, { ...props, diff --git a/source/playbooks/SC/ssmdocs/SC_RDS.8.ts b/source/playbooks/SC/ssmdocs/SC_RDS.8.ts index 08448db3..b9f04d28 100644 --- a/source/playbooks/SC/ssmdocs/SC_RDS.8.ts +++ b/source/playbooks/SC/ssmdocs/SC_RDS.8.ts @@ -9,7 +9,7 @@ export function 
createControlRunbook(scope: Construct, id: string, props: Playbo return new EnableRDSInstanceDeletionProtectionDocument(scope, id, { ...props, controlId: 'RDS.8' }); } -class EnableRDSInstanceDeletionProtectionDocument extends ControlRunbookDocument { +export class EnableRDSInstanceDeletionProtectionDocument extends ControlRunbookDocument { constructor(scope: Construct, id: string, props: ControlRunbookProps) { super(scope, id, { ...props, diff --git a/source/playbooks/SC/ssmdocs/SC_Redshift.1.ts b/source/playbooks/SC/ssmdocs/SC_Redshift.1.ts index a9536500..d700e83b 100644 --- a/source/playbooks/SC/ssmdocs/SC_Redshift.1.ts +++ b/source/playbooks/SC/ssmdocs/SC_Redshift.1.ts @@ -9,7 +9,7 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo return new DisablePublicAccessToRedshiftClusterDocument(scope, id, { ...props, controlId: 'Redshift.1' }); } -class DisablePublicAccessToRedshiftClusterDocument extends ControlRunbookDocument { +export class DisablePublicAccessToRedshiftClusterDocument extends ControlRunbookDocument { constructor(scope: Construct, id: string, props: ControlRunbookProps) { super(scope, id, { ...props, diff --git a/source/playbooks/SC/ssmdocs/SC_Redshift.3.ts b/source/playbooks/SC/ssmdocs/SC_Redshift.3.ts index 9449950b..91e19c1e 100644 --- a/source/playbooks/SC/ssmdocs/SC_Redshift.3.ts +++ b/source/playbooks/SC/ssmdocs/SC_Redshift.3.ts @@ -20,7 +20,7 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo return new EnableAutomaticSnapshotsOnRedshiftClusterDocument(scope, id, { ...props, controlId: 'Redshift.3' }); } -class EnableAutomaticSnapshotsOnRedshiftClusterDocument extends ControlRunbookDocument { +export class EnableAutomaticSnapshotsOnRedshiftClusterDocument extends ControlRunbookDocument { constructor(scope: Construct, id: string, props: ControlRunbookProps) { super(scope, id, { ...props, diff --git a/source/playbooks/SC/ssmdocs/SC_Redshift.4.ts 
b/source/playbooks/SC/ssmdocs/SC_Redshift.4.ts index a46494bf..1fb92c30 100644 --- a/source/playbooks/SC/ssmdocs/SC_Redshift.4.ts +++ b/source/playbooks/SC/ssmdocs/SC_Redshift.4.ts @@ -25,7 +25,7 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo return new EnableRedshiftClusterAuditLoggingDocument(scope, id, { ...props, controlId: 'Redshift.4' }); } -class EnableRedshiftClusterAuditLoggingDocument extends ControlRunbookDocument { +export class EnableRedshiftClusterAuditLoggingDocument extends ControlRunbookDocument { constructor(scope: Construct, id: string, props: ControlRunbookProps) { super(scope, id, { ...props, diff --git a/source/playbooks/SC/ssmdocs/SC_Redshift.6.ts b/source/playbooks/SC/ssmdocs/SC_Redshift.6.ts index 9c365aac..84a41ddf 100644 --- a/source/playbooks/SC/ssmdocs/SC_Redshift.6.ts +++ b/source/playbooks/SC/ssmdocs/SC_Redshift.6.ts @@ -20,7 +20,7 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo return new EnableAutomaticVersionUpgradeOnRedshiftClusterDocument(scope, id, { ...props, controlId: 'Redshift.6' }); } -class EnableAutomaticVersionUpgradeOnRedshiftClusterDocument extends ControlRunbookDocument { +export class EnableAutomaticVersionUpgradeOnRedshiftClusterDocument extends ControlRunbookDocument { constructor(scope: Construct, id: string, props: ControlRunbookProps) { super(scope, id, { ...props, diff --git a/source/playbooks/SC/ssmdocs/SC_S3.11.ts b/source/playbooks/SC/ssmdocs/SC_S3.11.ts new file mode 100644 index 00000000..18dde922 --- /dev/null +++ b/source/playbooks/SC/ssmdocs/SC_S3.11.ts @@ -0,0 +1,88 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { ControlRunbookDocument, ControlRunbookProps, RemediationScope } from './control_runbook'; +import { PlaybookProps } from '../lib/control_runbooks-construct'; +import { + AutomationStep, + DataTypeEnum, + HardCodedString, + Output, + StringListVariable, + StringVariable, +} from '@cdklabs/cdk-ssm-documents'; + +export function createControlRunbook(scope: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableBucketEventNotificationsDocument(scope, id, { ...props, controlId: 'S3.11' }); +} + +export class EnableBucketEventNotificationsDocument extends ControlRunbookDocument { + constructor(scope: Construct, id: string, props: ControlRunbookProps) { + super(scope, id, { + ...props, + securityControlId: 'S3.11', + remediationName: 'EnableBucketEventNotifications', + scope: RemediationScope.GLOBAL, + resourceIdName: 'BucketName', + resourceIdRegex: String.raw`^arn:(?:aws|aws-cn|aws-us-gov):s3:::([A-Za-z0-9.-]{3,63})$`, + updateDescription: HardCodedString.of('Configured event notifications to an S3 Bucket.'), + }); + } + + /** @override */ + protected getExtraSteps(): AutomationStep[] { + return [ + super.getInputParamsStep({ + eventTypes: [ + 's3:ReducedRedundancyLostObject', + 's3:ObjectCreated:*', + 's3:ObjectRemoved:*', + 's3:ObjectRestore:*', + 's3:Replication:*', + 's3:LifecycleExpiration:*', + 's3:LifecycleTransition', + 's3:IntelligentTiering', + 's3:ObjectTagging:*', + 's3:ObjectAcl:Put', + ], + }), + ]; + } + + /** @override */ + protected getInputParamsStepOutput(): Output[] { + const EventTypes: Output = { + name: 'eventTypes', + outputType: DataTypeEnum.STRING_LIST, + selector: '$.Payload.eventTypes', + }; + + const outputs: Output[] = [EventTypes]; + + return outputs; + } + + /** @override */ + protected getParseInputStepOutputs(): Output[] { + const outputs = super.getParseInputStepOutputs(); + + outputs.push({ + name: 
'RemediationAccount', + outputType: DataTypeEnum.STRING, + selector: '$.Payload.account_id', + }); + + return outputs; + } + + /** @override */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + protected getRemediationParams(): { [_: string]: any } { + const params = super.getRemediationParams(); + + params.AccountId = StringVariable.of('ParseInput.RemediationAccount'); + params.EventTypes = StringListVariable.of('GetInputParams.eventTypes'); + + return params; + } +} diff --git a/source/playbooks/SC/ssmdocs/SC_S3.13.ts b/source/playbooks/SC/ssmdocs/SC_S3.13.ts new file mode 100644 index 00000000..83b1ce61 --- /dev/null +++ b/source/playbooks/SC/ssmdocs/SC_S3.13.ts @@ -0,0 +1,78 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { PlaybookProps } from '../lib/control_runbooks-construct'; +import { ControlRunbookDocument, ControlRunbookProps, RemediationScope } from './control_runbook'; +import { + AutomationStep, + DataTypeEnum, + HardCodedString, + NumberVariable, + Output, + StringVariable, +} from '@cdklabs/cdk-ssm-documents'; + +export function createControlRunbook(stage: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new SetS3LifecyclePolicyDocument(stage, id, { ...props, controlId: 'S3.13' }); +} + +export class SetS3LifecyclePolicyDocument extends ControlRunbookDocument { + constructor(stage: Construct, id: string, props: ControlRunbookProps) { + super(stage, id, { + ...props, + securityControlId: 'S3.13', + remediationName: 'SetS3LifecyclePolicy', + scope: RemediationScope.GLOBAL, + resourceIdName: 'BucketName', + resourceIdRegex: String.raw`^arn:(?:aws|aws-cn|aws-us-gov):s3:::([a-z0-9.-]{3,63})$`, + updateDescription: HardCodedString.of('Setting an example lifecycle policy on the S3 bucket.'), + }); + } + + /** @override */ + protected getExtraSteps(): AutomationStep[] { + return [ + 
super.getInputParamsStep({ + targetTransitionDays: 30, + targetExpirationDays: 0, + targetTransitionStorageClass: 'INTELLIGENT_TIERING', + }), + ]; + } + + /** @override */ + protected getInputParamsStepOutput(): Output[] { + const TargetTransitionDays: Output = { + name: 'targetTransitionDays', + outputType: DataTypeEnum.INTEGER, + selector: '$.Payload.targetTransitionDays', + }; + const TargetExpirationDays: Output = { + name: 'targetExpirationDays', + outputType: DataTypeEnum.INTEGER, + selector: '$.Payload.targetExpirationDays', + }; + const TargetTransitionStorageClass: Output = { + name: 'targetTransitionStorageClass', + outputType: DataTypeEnum.STRING, + selector: '$.Payload.targetTransitionStorageClass', + }; + + const outputs: Output[] = [TargetTransitionDays, TargetExpirationDays, TargetTransitionStorageClass]; + + return outputs; + } + + /** @override */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + protected getRemediationParams(): { [_: string]: any } { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const params: { [_: string]: any } = super.getRemediationParams(); + + params.TargetTransitionDays = NumberVariable.of('GetInputParams.targetTransitionDays'); + params.TargetExpirationDays = NumberVariable.of('GetInputParams.targetExpirationDays'); + params.TargetTransitionStorageClass = StringVariable.of('GetInputParams.targetTransitionStorageClass'); + + return params; + } +} diff --git a/source/playbooks/SC/ssmdocs/SC_S3.6.ts b/source/playbooks/SC/ssmdocs/SC_S3.6.ts index 103ed36d..1088551a 100644 --- a/source/playbooks/SC/ssmdocs/SC_S3.6.ts +++ b/source/playbooks/SC/ssmdocs/SC_S3.6.ts @@ -20,7 +20,7 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo return new S3BlockDenylistDocument(scope, id, { ...props, controlId: 'S3.6' }); } -class S3BlockDenylistDocument extends ControlRunbookDocument { +export class S3BlockDenylistDocument extends ControlRunbookDocument { constructor(scope: 
Construct, id: string, props: ControlRunbookProps) { super(scope, id, { ...props, @@ -58,7 +58,7 @@ class S3BlockDenylistDocument extends ControlRunbookDocument { new ExecuteScriptStep(this, 'ExtractSensitiveApis', { language: ScriptLanguage.fromRuntime(this.runtimePython.name, 'runbook_handler'), code: ScriptCode.fromFile( - fs.realpathSync(path.join(__dirname, '..', '..', 'AFSBP', 'ssmdocs', 'scripts', 'deserializeApiList.py')) + fs.realpathSync(path.join(__dirname, '..', '..', 'AFSBP', 'ssmdocs', 'scripts', 'deserializeApiList.py')), ), outputs: [ { diff --git a/source/playbooks/SC/ssmdocs/SC_SNS.1.ts b/source/playbooks/SC/ssmdocs/SC_SNS.1.ts index 7f6244bc..5daf0714 100644 --- a/source/playbooks/SC/ssmdocs/SC_SNS.1.ts +++ b/source/playbooks/SC/ssmdocs/SC_SNS.1.ts @@ -24,7 +24,7 @@ export class EnableEncryptionForSNSTopicDocument extends ControlRunbookDocument securityControlId: 'SNS.1', remediationName: 'EnableEncryptionForSNSTopic', scope: RemediationScope.REGIONAL, - resourceIdName: 'SNSTopicArn', + resourceIdName: 'TopicArn', updateDescription: HardCodedString.of('Encryption enabled on SNS Topic'), }); } @@ -34,7 +34,7 @@ export class EnableEncryptionForSNSTopicDocument extends ControlRunbookDocument protected getRemediationParams(): { [_: string]: any } { const params = super.getRemediationParams(); params.KmsKeyArn = StringVariable.of('KmsKeyArn'); - params.SNSTopicArn = StringVariable.of('ParseInput.SNSTopicArn'); + params.TopicArn = StringVariable.of('ParseInput.TopicArn'); return params; } } diff --git a/source/playbooks/SC/ssmdocs/SC_SNS.2.ts b/source/playbooks/SC/ssmdocs/SC_SNS.2.ts index b9691085..51fd063e 100644 --- a/source/playbooks/SC/ssmdocs/SC_SNS.2.ts +++ b/source/playbooks/SC/ssmdocs/SC_SNS.2.ts @@ -3,7 +3,7 @@ import { Construct } from 'constructs'; import { ControlRunbookDocument, ControlRunbookProps, RemediationScope } from './control_runbook'; import { PlaybookProps } from '../lib/control_runbooks-construct'; -import { HardCodedString, 
Input, StringVariable } from '@cdklabs/cdk-ssm-documents'; +import { HardCodedString, StringFormat, StringVariable } from '@cdklabs/cdk-ssm-documents'; export function createControlRunbook(scope: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { return new EnableDeliveryLoggingForSNSTopicDocument(scope, id, { ...props, controlId: 'SNS.2' }); @@ -11,18 +11,10 @@ export function createControlRunbook(scope: Construct, id: string, props: Playbo export class EnableDeliveryLoggingForSNSTopicDocument extends ControlRunbookDocument { constructor(scope: Construct, id: string, props: ControlRunbookProps) { - const docInputs = [ - Input.ofTypeString('LoggingRole', { - allowedPattern: String.raw`^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$`, - defaultValue: '{{ssm:/Solutions/SO0111/DeliveryStatusLoggingRole}}', - }), - ]; - super(scope, id, { ...props, - docInputs, securityControlId: 'SNS.2', - remediationName: 'EnableDeliveryLoggingForSNSTopic', + remediationName: 'EnableDeliveryStatusLoggingForSNSTopic', scope: RemediationScope.REGIONAL, resourceIdName: 'SNSTopicArn', updateDescription: HardCodedString.of('Delivery Status Logging enabled on SNS Topic'), @@ -34,7 +26,12 @@ export class EnableDeliveryLoggingForSNSTopicDocument extends ControlRunbookDocu protected getRemediationParams(): { [_: string]: any } { const params = super.getRemediationParams(); params.SNSTopicArn = StringVariable.of('ParseInput.SNSTopicArn'); - params.LoggingRole = StringVariable.of('LoggingRole'); + + params.LoggingRole = new StringFormat( + `arn:%s:iam::%s:role/${this.solutionId}-SNS2DeliveryStatusLogging-remediationRole`, + [StringVariable.of('global:AWS_PARTITION'), StringVariable.of('global:ACCOUNT_ID')], + ); + return params; } } diff --git a/source/playbooks/SC/ssmdocs/SC_SQS.1.ts b/source/playbooks/SC/ssmdocs/SC_SQS.1.ts index 895d28ac..36ae0e1e 100644 --- a/source/playbooks/SC/ssmdocs/SC_SQS.1.ts +++ b/source/playbooks/SC/ssmdocs/SC_SQS.1.ts @@ -36,7 +36,6 
@@ export class EnableEncryptionForSQSQueueDocument extends ControlRunbookDocument const params = super.getRemediationParams(); params.KmsKeyArn = StringVariable.of('KmsKeyArn'); - params.SNSTopicArn = StringVariable.of('ParseInput.SQSQueueName'); return params; } diff --git a/source/playbooks/SC/ssmdocs/SC_SSM.4.ts b/source/playbooks/SC/ssmdocs/SC_SSM.4.ts new file mode 100644 index 00000000..454cf800 --- /dev/null +++ b/source/playbooks/SC/ssmdocs/SC_SSM.4.ts @@ -0,0 +1,23 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { ControlRunbookDocument, ControlRunbookProps, RemediationScope } from './control_runbook'; +import { PlaybookProps } from '../lib/control_runbooks-construct'; +import { HardCodedString } from '@cdklabs/cdk-ssm-documents'; + +export function createControlRunbook(scope: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new DisablePublicSSMDocument(scope, id, { ...props, controlId: 'SSM.4' }); +} + +export class DisablePublicSSMDocument extends ControlRunbookDocument { + constructor(scope: Construct, id: string, props: ControlRunbookProps) { + super(scope, id, { + ...props, + securityControlId: 'SSM.4', + remediationName: 'BlockSSMDocumentPublicAccess', + scope: RemediationScope.REGIONAL, + resourceIdName: 'DocumentArn', + updateDescription: HardCodedString.of('SSM document changed from public to private'), + }); + } +} diff --git a/source/playbooks/SC/ssmdocs/SC_SecretsManager.1.ts b/source/playbooks/SC/ssmdocs/SC_SecretsManager.1.ts new file mode 100644 index 00000000..11a8d108 --- /dev/null +++ b/source/playbooks/SC/ssmdocs/SC_SecretsManager.1.ts @@ -0,0 +1,55 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { ControlRunbookDocument, ControlRunbookProps, RemediationScope } from './control_runbook'; +import { PlaybookProps } from '../lib/control_runbooks-construct'; +import { AutomationStep, DataTypeEnum, HardCodedString, Output, NumberVariable } from '@cdklabs/cdk-ssm-documents'; + +export function createControlRunbook(scope: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new EnableAutoSecretRotationDocument(scope, id, { ...props, controlId: 'SecretsManager.1' }); +} + +export class EnableAutoSecretRotationDocument extends ControlRunbookDocument { + constructor(scope: Construct, id: string, props: ControlRunbookProps) { + super(scope, id, { + ...props, + securityControlId: 'SecretsManager.1', + remediationName: 'EnableAutoSecretRotation', + scope: RemediationScope.REGIONAL, + resourceIdName: 'SecretARN', + updateDescription: HardCodedString.of('Enabled automatic rotation on secret and set schedule to 90 days.'), + }); + } + + /** @override */ + protected getExtraSteps(): AutomationStep[] { + return [ + super.getInputParamsStep({ + maximumAllowedRotationFrequency: 90, + }), + ]; + } + + /** @override */ + protected getInputParamsStepOutput(): Output[] { + const MaximumAllowedRotationFrequency: Output = { + name: 'maximumAllowedRotationFrequency', + outputType: DataTypeEnum.INTEGER, + selector: '$.Payload.maximumAllowedRotationFrequency', + }; + + const outputs: Output[] = [MaximumAllowedRotationFrequency]; + + return outputs; + } + /** @override */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + protected getRemediationParams(): { [_: string]: any } { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const params: { [_: string]: any } = super.getRemediationParams(); + + params.MaximumAllowedRotationFrequency = NumberVariable.of('GetInputParams.maximumAllowedRotationFrequency'); + + return params; + } +} diff --git 
a/source/playbooks/SC/ssmdocs/SC_SecretsManager.3.ts b/source/playbooks/SC/ssmdocs/SC_SecretsManager.3.ts new file mode 100644 index 00000000..e0ff4c8d --- /dev/null +++ b/source/playbooks/SC/ssmdocs/SC_SecretsManager.3.ts @@ -0,0 +1,55 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { ControlRunbookDocument, ControlRunbookProps, RemediationScope } from './control_runbook'; +import { PlaybookProps } from '../lib/control_runbooks-construct'; +import { AutomationStep, DataTypeEnum, HardCodedString, Output, StringListVariable } from '@cdklabs/cdk-ssm-documents'; + +export function createControlRunbook(scope: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new RemoveUnusedSecretDocument(scope, id, { ...props, controlId: 'SecretsManager.3' }); +} + +export class RemoveUnusedSecretDocument extends ControlRunbookDocument { + constructor(scope: Construct, id: string, props: ControlRunbookProps) { + super(scope, id, { + ...props, + securityControlId: 'SecretsManager.3', + remediationName: 'RemoveUnusedSecret', + scope: RemediationScope.REGIONAL, + resourceIdName: 'SecretARN', + updateDescription: HardCodedString.of('Removed the unused secret.'), + }); + } + + /** @override */ + protected getExtraSteps(): AutomationStep[] { + return [ + super.getInputParamsStep({ + unusedForDays: 90, + }), + ]; + } + + /** @override */ + protected getInputParamsStepOutput(): Output[] { + const EventTypes: Output = { + name: 'UnusedForDays', + outputType: DataTypeEnum.STRING_LIST, + selector: '$.Payload.unusedForDays', + }; + + const outputs: Output[] = [EventTypes]; + + return outputs; + } + + /** @override */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + protected getRemediationParams(): { [_: string]: any } { + const params = super.getRemediationParams(); + + params.UnusedForDays = 
StringListVariable.of('GetInputParams.UnusedForDays'); + + return params; + } +} diff --git a/source/playbooks/SC/ssmdocs/SC_SecretsManager.4.ts b/source/playbooks/SC/ssmdocs/SC_SecretsManager.4.ts new file mode 100644 index 00000000..66e4b62c --- /dev/null +++ b/source/playbooks/SC/ssmdocs/SC_SecretsManager.4.ts @@ -0,0 +1,55 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { Construct } from 'constructs'; +import { ControlRunbookDocument, ControlRunbookProps, RemediationScope } from './control_runbook'; +import { PlaybookProps } from '../lib/control_runbooks-construct'; +import { AutomationStep, DataTypeEnum, HardCodedString, Output, StringListVariable } from '@cdklabs/cdk-ssm-documents'; + +export function createControlRunbook(scope: Construct, id: string, props: PlaybookProps): ControlRunbookDocument { + return new UpdateSecretRotationPeriodDocument(scope, id, { ...props, controlId: 'SecretsManager.4' }); +} + +export class UpdateSecretRotationPeriodDocument extends ControlRunbookDocument { + constructor(scope: Construct, id: string, props: ControlRunbookProps) { + super(scope, id, { + ...props, + securityControlId: 'SecretsManager.4', + remediationName: 'UpdateSecretRotationPeriod', + scope: RemediationScope.REGIONAL, + resourceIdName: 'SecretARN', + updateDescription: HardCodedString.of('Rotated secret and set rotation schedule to 90 days.'), + }); + } + + /** @override */ + protected getExtraSteps(): AutomationStep[] { + return [ + super.getInputParamsStep({ + maxDaysSinceRotation: 90, + }), + ]; + } + + /** @override */ + protected getInputParamsStepOutput(): Output[] { + const EventTypes: Output = { + name: 'MaxDaysSinceRotation', + outputType: DataTypeEnum.STRING_LIST, + selector: '$.Payload.maxDaysSinceRotation', + }; + + const outputs: Output[] = [EventTypes]; + + return outputs; + } + + /** @override */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + protected 
getRemediationParams(): { [_: string]: any } { + const params = super.getRemediationParams(); + + params.MaxDaysSinceRotation = StringListVariable.of('GetInputParams.MaxDaysSinceRotation'); + + return params; + } +} diff --git a/source/playbooks/SC/ssmdocs/control_runbook.ts b/source/playbooks/SC/ssmdocs/control_runbook.ts index 2a482b84..e97a5718 100644 --- a/source/playbooks/SC/ssmdocs/control_runbook.ts +++ b/source/playbooks/SC/ssmdocs/control_runbook.ts @@ -164,13 +164,29 @@ export abstract class ControlRunbookDocument extends AutomationDocument { /** * @virtual - * @returns The inputs to the `parse_input.py` script + * @returns The `getInputParams` step to parse any user customized input parameters. */ - protected getParseInputStepInputs(): { [_: string]: IGenericVariable } { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + protected getInputParamsStep(defaultParameters: { [_: string]: any }): AutomationStep { + const getInputParamsStep = new ExecuteScriptStep(this, 'GetInputParams', { + language: ScriptLanguage.fromRuntime(this.runtimePython.name, 'get_input_params'), + code: ScriptCode.fromFile(fs.realpathSync(path.join(__dirname, '..', '..', 'common', 'get_input_params.py'))), + inputPayload: this.getInputParamsStepInputs(defaultParameters), + outputs: this.getInputParamsStepOutput(), + }); + + return getInputParamsStep; + } + + /** + * @virtual + * @returns The inputs to the `get_input_params.py` script + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + protected getInputParamsStepInputs(defaultParameters: { [_: string]: any }): { [_: string]: IGenericVariable } { return { - Finding: StringMapVariable.of('Finding'), - parse_id_pattern: HardCodedString.of(this.resourceIdRegex ?? 
''), - expected_control_id: HardCodedStringList.of(this.expectedControlIds), + SecHubInputParams: StringMapVariable.of('ParseInput.InputParams'), + DefaultParams: HardCodedStringMap.of(defaultParameters), }; } @@ -209,8 +225,13 @@ export abstract class ControlRunbookDocument extends AutomationDocument { outputType: DataTypeEnum.STRING, selector: '$.Payload.product_arn', }; + const inputParamsOutput: Output = { + name: 'InputParams', + outputType: DataTypeEnum.STRING_MAP, + selector: '$.Payload.input_params', + }; - const outputs: Output[] = [findingIdOutput, productArnOutput, affectedObjectOutput]; + const outputs: Output[] = [findingIdOutput, productArnOutput, affectedObjectOutput, inputParamsOutput]; // Output the resource id if used if (this.resourceIdName) { @@ -225,6 +246,34 @@ export abstract class ControlRunbookDocument extends AutomationDocument { return outputs; } + /** + * @virtual + * @returns The inputs to the `parse_input.py` script + */ + protected getParseInputStepInputs(): { [_: string]: IGenericVariable } { + return { + Finding: StringMapVariable.of('Finding'), + parse_id_pattern: HardCodedString.of(this.resourceIdRegex ?? ''), + expected_control_id: HardCodedStringList.of(this.expectedControlIds), + }; + } + + /** + * @virtual + * @returns The output values of the `GetInputParams` step. + */ + protected getInputParamsStepOutput(): Output[] { + const inputParamsOutput: Output = { + name: 'InputParams', + outputType: DataTypeEnum.STRING_MAP, + selector: '$.Payload.input_params', + }; + + const outputs: Output[] = [inputParamsOutput]; + + return outputs; + } + /** * @virtual * @returns Additional `AutomationStep`s that must occur between the `ParseInput` and `Remediation` steps. 
diff --git a/source/playbooks/SC/ssmdocs/descriptions/AutoScaling.1.md b/source/playbooks/SC/ssmdocs/descriptions/AutoScaling.1.md index 1569f109..1b0cc8b3 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/AutoScaling.1.md +++ b/source/playbooks/SC/ssmdocs/descriptions/AutoScaling.1.md @@ -13,4 +13,4 @@ Default: 30 seconds * Remediation.Output ## Documentation Links -* [AFSBP AutoScaling.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-autoscaling-1) +* [AWS FSBP AutoScaling.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-autoscaling-1) diff --git a/source/playbooks/SC/ssmdocs/descriptions/CloudFormation.1.md b/source/playbooks/SC/ssmdocs/descriptions/CloudFormation.1.md index 4ab62194..2e23174b 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/CloudFormation.1.md +++ b/source/playbooks/SC/ssmdocs/descriptions/CloudFormation.1.md @@ -11,4 +11,4 @@ This document configures an SNS topic for notifications from a CloudFormation st * Remediation.Output ## Documentation Links -* [AFSBP v1.0.0 CloudFormation.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudformation-1) +* [AWS FSBP v1.0.0 CloudFormation.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudformation-1) diff --git a/source/playbooks/SC/ssmdocs/descriptions/CloudFront.1.md b/source/playbooks/SC/ssmdocs/descriptions/CloudFront.1.md new file mode 100644 index 00000000..b683a34c --- /dev/null +++ b/source/playbooks/SC/ssmdocs/descriptions/CloudFront.1.md @@ -0,0 +1,14 @@ +### Document Name - ASR-AFSBP_1.0.0_CloudFront.1 + +## What does this document do? +This document configures a default root object to be returned when visiting a CloudFront distribution. 
+ +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 CloudFront.1](https://docs.aws.amazon.com/securityhub/latest/userguide/cloudfront-controls.html#cloudfront-1) \ No newline at end of file diff --git a/source/playbooks/SC/ssmdocs/descriptions/CloudFront.12.md b/source/playbooks/SC/ssmdocs/descriptions/CloudFront.12.md new file mode 100644 index 00000000..29ca0c5d --- /dev/null +++ b/source/playbooks/SC/ssmdocs/descriptions/CloudFront.12.md @@ -0,0 +1,11 @@ +### Document Name - ASR-NIST_800_53_CloudFront.12 +## What does this document do? +This document sets the origin domain to a non-existent value to prevent a potential malicious takeover. +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +## Output Parameters +* Remediation.Output - Output from the remediation + +## Documentation Links +* [NIST CloudFront.12](https://docs.aws.amazon.com/securityhub/latest/userguide/cloudfront-controls.html#cloudfront-12) diff --git a/source/playbooks/SC/ssmdocs/descriptions/CloudTrail.1.md b/source/playbooks/SC/ssmdocs/descriptions/CloudTrail.1.md index 8f2f3f28..76f51a69 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/CloudTrail.1.md +++ b/source/playbooks/SC/ssmdocs/descriptions/CloudTrail.1.md @@ -8,4 +8,4 @@ Note: this remediation will create a NEW trail. * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf.
## Documentation Links -* [AFSBP CloudTrail.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-1) +* [AWS FSBP CloudTrail.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-1) diff --git a/source/playbooks/SC/ssmdocs/descriptions/CloudTrail.2.md b/source/playbooks/SC/ssmdocs/descriptions/CloudTrail.2.md index a9b8249f..edb219eb 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/CloudTrail.2.md +++ b/source/playbooks/SC/ssmdocs/descriptions/CloudTrail.2.md @@ -8,4 +8,4 @@ This document enables SSE KMS encryption for log files using the ASR remediation * Remediation.Output - Output from the remediation ## Documentation Links -* [AFSBP CloudTrail.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-2) +* [AWS FSBP CloudTrail.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-2) diff --git a/source/playbooks/SC/ssmdocs/descriptions/CloudTrail.4.md b/source/playbooks/SC/ssmdocs/descriptions/CloudTrail.4.md index 9da59d7e..acd9c8e3 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/CloudTrail.4.md +++ b/source/playbooks/SC/ssmdocs/descriptions/CloudTrail.4.md @@ -11,4 +11,4 @@ This document enables CloudTrail log file validation. 
* Remediation.Output ## Documentation Links -* [AFSBP v1.0.0 CloudTrail.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-4) +* [AWS FSBP v1.0.0 CloudTrail.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-4) diff --git a/source/playbooks/SC/ssmdocs/descriptions/CloudTrail.5.md b/source/playbooks/SC/ssmdocs/descriptions/CloudTrail.5.md index 0ef92e7e..a90a8126 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/CloudTrail.5.md +++ b/source/playbooks/SC/ssmdocs/descriptions/CloudTrail.5.md @@ -11,4 +11,4 @@ This document configures CloudTrail to log to CloudWatch Logs. * Remediation.Output - Remediation results ## Documentation Links -* [AFSBP v1.0.0 CloudTrail.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-5) +* [AWS FSBP v1.0.0 CloudTrail.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-5) diff --git a/source/playbooks/SC/ssmdocs/descriptions/CodeBuild.2.md b/source/playbooks/SC/ssmdocs/descriptions/CodeBuild.2.md index 3b4bd72c..06d88bf6 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/CodeBuild.2.md +++ b/source/playbooks/SC/ssmdocs/descriptions/CodeBuild.2.md @@ -11,4 +11,4 @@ This document removes CodeBuild project environment variables containing clear t * Remediation.Output ## Documentation Links -* [AFSBP v1.0.0 CodeBuild.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-codebuild-2) +* [AWS FSBP v1.0.0 CodeBuild.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-codebuild-2) diff --git a/source/playbooks/SC/ssmdocs/descriptions/CodeBuild.5.md b/source/playbooks/SC/ssmdocs/descriptions/CodeBuild.5.md new file mode 100644 index 00000000..b9c3b780 --- /dev/null +++ 
b/source/playbooks/SC/ssmdocs/descriptions/CodeBuild.5.md @@ -0,0 +1,14 @@ +### Document Name - ASR-AFSBP_1.0.0_CodeBuild.5 + +## What does this document do? +This document removes CodeBuild project privileged mode to remove a build project's Docker container access to all devices. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 CodeBuild.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-codebuild-5) diff --git a/source/playbooks/SC/ssmdocs/descriptions/Config.1.md b/source/playbooks/SC/ssmdocs/descriptions/Config.1.md index 012afc0e..978f7b17 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/Config.1.md +++ b/source/playbooks/SC/ssmdocs/descriptions/Config.1.md @@ -12,4 +12,4 @@ Enables AWS Config: * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. ## Documentation Links -* [AFSBP Config.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-config-1) +* [AWS FSBP Config.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-config-1) diff --git a/source/playbooks/SC/ssmdocs/descriptions/EC2.1.md b/source/playbooks/SC/ssmdocs/descriptions/EC2.1.md index f1aa66e0..27092d2e 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/EC2.1.md +++ b/source/playbooks/SC/ssmdocs/descriptions/EC2.1.md @@ -7,4 +7,4 @@ This document changes all public EC2 snapshots to private * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
## Documentation Links -* [AFSBP EC2.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-1) +* [AWS FSBP EC2.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-1) diff --git a/source/playbooks/SC/ssmdocs/descriptions/EC2.15.md b/source/playbooks/SC/ssmdocs/descriptions/EC2.15.md index dbbd31ca..e1769a0f 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/EC2.15.md +++ b/source/playbooks/SC/ssmdocs/descriptions/EC2.15.md @@ -11,4 +11,4 @@ This document disables auto assignment of public IP addresses on a subnet. * Remediation.Output ## Documentation Links -* [AFSBP v1.0.0 EC2.15](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-15) \ No newline at end of file +* [AWS FSBP v1.0.0 EC2.15](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-15) \ No newline at end of file diff --git a/source/playbooks/SC/ssmdocs/descriptions/EC2.18.md b/source/playbooks/SC/ssmdocs/descriptions/EC2.18.md new file mode 100644 index 00000000..d0631de5 --- /dev/null +++ b/source/playbooks/SC/ssmdocs/descriptions/EC2.18.md @@ -0,0 +1,15 @@ +### Document Name - ASR-AFSBP_1.0.0_EC2.18 + +## What does this document do? +This document revokes inbound security group rules that allow unrestricted access to ports that are not authorized. +Authorized ports are listed in authorizedTcpPorts and authorizedUdpPorts parameters. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 EC2.18](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-18) \ No newline at end of file diff --git a/source/playbooks/SC/ssmdocs/descriptions/EC2.19.md b/source/playbooks/SC/ssmdocs/descriptions/EC2.19.md new file mode 100644 index 00000000..b7f0db6c --- /dev/null +++ b/source/playbooks/SC/ssmdocs/descriptions/EC2.19.md @@ -0,0 +1,14 @@ +### Document Name - ASR-AFSBP_1.0.0_EC2.19 + +## What does this document do? +This document disables unrestricted access to high risk ports. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 EC2.19](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-19) \ No newline at end of file diff --git a/source/playbooks/SC/ssmdocs/descriptions/EC2.2.md b/source/playbooks/SC/ssmdocs/descriptions/EC2.2.md index de1545fe..b566ff6e 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/EC2.2.md +++ b/source/playbooks/SC/ssmdocs/descriptions/EC2.2.md @@ -12,4 +12,4 @@ group using the AWS SSM Runbook AWSConfigRemediation-RemoveVPCDefaultSecurityGro * Remediation.Output - Output from AWSConfigRemediation-RemoveVPCDefaultSecurityGroupRules SSM doc ## Documentation Links -* [AFSBP EC2.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-2) +* [AWS FSBP EC2.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-2) diff --git a/source/playbooks/SC/ssmdocs/descriptions/EC2.23.md b/source/playbooks/SC/ssmdocs/descriptions/EC2.23.md new file mode 100644 index 00000000..9f88cbad --- /dev/null +++ b/source/playbooks/SC/ssmdocs/descriptions/EC2.23.md @@ -0,0 +1,14 @@ +### 
Document Name - ASR-AFSBP_1.0.0_EC2.23 + +## What does this document do? +This document turns off AutoAcceptSharedAttachments on a transit gateway to ensure that only authorized VPC attachment requests are accepted. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 EC2.23](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-23) \ No newline at end of file diff --git a/source/playbooks/SC/ssmdocs/descriptions/EC2.4.md b/source/playbooks/SC/ssmdocs/descriptions/EC2.4.md new file mode 100644 index 00000000..1c4cba84 --- /dev/null +++ b/source/playbooks/SC/ssmdocs/descriptions/EC2.4.md @@ -0,0 +1,14 @@ +### Document Name - ASR-AFSBP_1.0.0_EC2.4 + +## What does this document do? +This document terminates an EC2 instance if it has been stopped for longer than the allowed number of days defined by the AllowedDays parameter. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 EC2.4](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-4) \ No newline at end of file diff --git a/source/playbooks/SC/ssmdocs/descriptions/EC2.6.md b/source/playbooks/SC/ssmdocs/descriptions/EC2.6.md index 04524e92..fece030f 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/EC2.6.md +++ b/source/playbooks/SC/ssmdocs/descriptions/EC2.6.md @@ -11,4 +11,4 @@ Enables VPC Flow Logs for a VPC * Remediation.Output - Remediation results ## Documentation Links -* [AFSBP EC2.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-6) +* [AWS FSBP EC2.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-6) diff --git a/source/playbooks/SC/ssmdocs/descriptions/EC2.7.md b/source/playbooks/SC/ssmdocs/descriptions/EC2.7.md index ec863216..9ab00dba 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/EC2.7.md +++ b/source/playbooks/SC/ssmdocs/descriptions/EC2.7.md @@ -8,4 +8,4 @@ This document enables `EBS Encryption by default` for an AWS account in the curr * Remediation.Output ## Documentation Links -* [AFSBP EC2.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-7) +* [AWS FSBP EC2.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-7) diff --git a/source/playbooks/SC/ssmdocs/descriptions/EC2.8.md b/source/playbooks/SC/ssmdocs/descriptions/EC2.8.md new file mode 100644 index 00000000..8477d08e --- /dev/null +++ b/source/playbooks/SC/ssmdocs/descriptions/EC2.8.md @@ -0,0 +1,11 @@ +### Document Name - ASR-AFSBP_1.0.0_EC2.8 +## What does this document do? +This document enables IMDSv2 on an Instance for an AWS account in the current region by calling another SSM document. 
+## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP EC2.8](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-8) diff --git a/source/playbooks/SC/ssmdocs/descriptions/ECR.1.md b/source/playbooks/SC/ssmdocs/descriptions/ECR.1.md new file mode 100644 index 00000000..5bb4fd80 --- /dev/null +++ b/source/playbooks/SC/ssmdocs/descriptions/ECR.1.md @@ -0,0 +1,14 @@ +### Document Name - ASR-AFSBP_1.0.0_ECR.1 + +## What does this document do? +This document enables image scanning configuration on a private ECR repository. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 ECR.1](https://docs.aws.amazon.com/securityhub/latest/userguide/ecr-controls.html#ecr-1) \ No newline at end of file diff --git a/source/playbooks/SC/ssmdocs/descriptions/GuardDuty.1.md b/source/playbooks/SC/ssmdocs/descriptions/GuardDuty.1.md new file mode 100644 index 00000000..daccc4d9 --- /dev/null +++ b/source/playbooks/SC/ssmdocs/descriptions/GuardDuty.1.md @@ -0,0 +1,14 @@ +### Document Name - ASR-AFSBP_1.0.0_GuardDuty.1 + +## What does this document do? +This document enables GuardDuty. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 GuardDuty.1](https://docs.aws.amazon.com/securityhub/latest/userguide/guardduty-controls.html#guardduty-1) \ No newline at end of file diff --git a/source/playbooks/SC/ssmdocs/descriptions/IAM.3.md b/source/playbooks/SC/ssmdocs/descriptions/IAM.3.md index a0ea8c2d..9aeb4714 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/IAM.3.md +++ b/source/playbooks/SC/ssmdocs/descriptions/IAM.3.md @@ -11,4 +11,4 @@ This document disables active keys that have not been rotated for more than 90 d * Remediation.Output ## Documentation Links -* [AFSBP v1.0.0 IAM.3](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-3) +* [AWS FSBP v1.0.0 IAM.3](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-3) diff --git a/source/playbooks/SC/ssmdocs/descriptions/IAM.7.md b/source/playbooks/SC/ssmdocs/descriptions/IAM.7.md index 3eb79533..db0003d4 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/IAM.7.md +++ b/source/playbooks/SC/ssmdocs/descriptions/IAM.7.md @@ -4,7 +4,7 @@ This document establishes a default password policy. ## Security Standards and Controls -* AFSBP IAM.7 +* AWS FSBP IAM.7 ## Input Parameters * Finding: (Required) Security Hub finding details JSON @@ -13,4 +13,4 @@ This document establishes a default password policy. 
* Remediation.Output ## Documentation Links -* [AFSBP IAM.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-7) +* [AWS FSBP IAM.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-7) diff --git a/source/playbooks/SC/ssmdocs/descriptions/IAM.8.md b/source/playbooks/SC/ssmdocs/descriptions/IAM.8.md index c83e1d1e..53cf37e7 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/IAM.8.md +++ b/source/playbooks/SC/ssmdocs/descriptions/IAM.8.md @@ -13,4 +13,4 @@ This document ensures that credentials unused for 90 days or greater are disable SEE AWSConfigRemediation-RevokeUnusedIAMUserCredentials ## Documentation Links -* [AFSBP IAM.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-8) +* [AWS FSBP IAM.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-8) diff --git a/source/playbooks/SC/ssmdocs/descriptions/Lambda.1.md b/source/playbooks/SC/ssmdocs/descriptions/Lambda.1.md index a43c9137..c03ce880 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/Lambda.1.md +++ b/source/playbooks/SC/ssmdocs/descriptions/Lambda.1.md @@ -10,4 +10,4 @@ function. The remediation is to remove the SID of the public policy. * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
## Documentation Links -* [AFSBP Lambda.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-lambda-1) +* [AWS FSBP Lambda.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-lambda-1) diff --git a/source/playbooks/SC/ssmdocs/descriptions/RDS.1.md b/source/playbooks/SC/ssmdocs/descriptions/RDS.1.md index 4265493b..ec927334 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/RDS.1.md +++ b/source/playbooks/SC/ssmdocs/descriptions/RDS.1.md @@ -7,4 +7,4 @@ This document changes public RDS snapshot to private * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. ## Documentation Links -* [AFSBP RDS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-1) +* [AWS FSBP RDS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-1) diff --git a/source/playbooks/SC/ssmdocs/descriptions/RDS.13.md b/source/playbooks/SC/ssmdocs/descriptions/RDS.13.md index c119ed71..26882093 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/RDS.13.md +++ b/source/playbooks/SC/ssmdocs/descriptions/RDS.13.md @@ -11,4 +11,4 @@ This document enables `Auto minor version upgrade` on a given Amazon RDS instanc * Remediation.Output - The standard HTTP response from the ModifyDBInstance API. 
## Documentation Links -* [AFSBP RDS.13](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-13) +* [AWS FSBP RDS.13](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-13) diff --git a/source/playbooks/SC/ssmdocs/descriptions/RDS.16.md b/source/playbooks/SC/ssmdocs/descriptions/RDS.16.md index a77d1f67..f907d5d8 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/RDS.16.md +++ b/source/playbooks/SC/ssmdocs/descriptions/RDS.16.md @@ -11,4 +11,4 @@ This document enables `Copy tags to snapshots` on a given Amazon RDS cluster by * Remediation.Output - The standard HTTP response from the ModifyDBCluster API. ## Documentation Links -* [AFSBP RDS.16](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-16) +* [AWS FSBP RDS.16](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-16) diff --git a/source/playbooks/SC/ssmdocs/descriptions/RDS.2.md b/source/playbooks/SC/ssmdocs/descriptions/RDS.2.md index 38797ba3..ea3c5878 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/RDS.2.md +++ b/source/playbooks/SC/ssmdocs/descriptions/RDS.2.md @@ -7,7 +7,7 @@ This document disables public access to RDS instances by calling another SSM doc * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. ## Documentation Links -* [AFSBP RDS.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-2) +* [AWS FSBP RDS.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-2) ## Troubleshooting * ModifyDBInstance isn't supported for a DB instance in a Multi-AZ DB Cluster. 
diff --git a/source/playbooks/SC/ssmdocs/descriptions/RDS.4.md b/source/playbooks/SC/ssmdocs/descriptions/RDS.4.md index 068199c7..439f7ce7 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/RDS.4.md +++ b/source/playbooks/SC/ssmdocs/descriptions/RDS.4.md @@ -10,4 +10,4 @@ This document encrypts an unencrypted RDS snapshot by calling another SSM docume * KMSKeyId: (Optional) ID, ARN or Alias for the AWS KMS Customer-Managed Key (CMK) to use to encrypt the snapshot. ## Documentation Links -* [AFSBP RDS.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-4) +* [AWS FSBP RDS.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-4) diff --git a/source/playbooks/SC/ssmdocs/descriptions/RDS.5.md b/source/playbooks/SC/ssmdocs/descriptions/RDS.5.md index 3efb7f7b..5dcc1577 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/RDS.5.md +++ b/source/playbooks/SC/ssmdocs/descriptions/RDS.5.md @@ -9,4 +9,4 @@ This document configures an RDS DB instance for multiple Availability Zones by c * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. ## Documentation Links -* [AFSBP RDS.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-5) +* [AWS FSBP RDS.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-5) diff --git a/source/playbooks/SC/ssmdocs/descriptions/RDS.6.md b/source/playbooks/SC/ssmdocs/descriptions/RDS.6.md index 455215da..10a65904 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/RDS.6.md +++ b/source/playbooks/SC/ssmdocs/descriptions/RDS.6.md @@ -11,4 +11,4 @@ This document enables `Enhanced Monitoring` on a given Amazon RDS instance by ca * VerifyRemediation.Output - The standard HTTP response from the ModifyDBInstance API. 
## Documentation Links -* [AFSBP RDS.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-6) +* [AWS FSBP RDS.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-6) diff --git a/source/playbooks/SC/ssmdocs/descriptions/RDS.7.md b/source/playbooks/SC/ssmdocs/descriptions/RDS.7.md index bd611595..eb21924e 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/RDS.7.md +++ b/source/playbooks/SC/ssmdocs/descriptions/RDS.7.md @@ -11,4 +11,4 @@ This document enables `Deletion Protection` on a given Amazon RDS cluster by cal * Remediation.Output - The standard HTTP response from the ModifyDBCluster API. ## Documentation Links -* [AFSBP RDS.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-7) +* [AWS FSBP RDS.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-7) diff --git a/source/playbooks/SC/ssmdocs/descriptions/RDS.8.md b/source/playbooks/SC/ssmdocs/descriptions/RDS.8.md index 4c039f7d..e353ccb2 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/RDS.8.md +++ b/source/playbooks/SC/ssmdocs/descriptions/RDS.8.md @@ -9,4 +9,4 @@ This document enables `Deletion Protection` on a given Amazon RDS cluster by cal * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. 
## Documentation Links -* [AFSBP RDS.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-8) +* [AWS FSBP RDS.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-8) diff --git a/source/playbooks/SC/ssmdocs/descriptions/Redshift.1.md b/source/playbooks/SC/ssmdocs/descriptions/Redshift.1.md index cbfe6c9d..36238844 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/Redshift.1.md +++ b/source/playbooks/SC/ssmdocs/descriptions/Redshift.1.md @@ -9,4 +9,4 @@ This document disables public access to a Redshift cluster by calling another SS * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. ## Documentation Links -* [AFSBP Redshift.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-1) +* [AWS FSBP Redshift.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-1) diff --git a/source/playbooks/SC/ssmdocs/descriptions/Redshift.3.md b/source/playbooks/SC/ssmdocs/descriptions/Redshift.3.md index fdef835b..90600a44 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/Redshift.3.md +++ b/source/playbooks/SC/ssmdocs/descriptions/Redshift.3.md @@ -9,4 +9,4 @@ This document enables automatic snapshots on a Redshift cluster by calling anoth * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. 
## Documentation Links -* [AFSBP Redshift.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-3) +* [AWS FSBP Redshift.3](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-3) diff --git a/source/playbooks/SC/ssmdocs/descriptions/Redshift.4.md b/source/playbooks/SC/ssmdocs/descriptions/Redshift.4.md index ce133edb..9476527a 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/Redshift.4.md +++ b/source/playbooks/SC/ssmdocs/descriptions/Redshift.4.md @@ -9,4 +9,4 @@ This document disables public access to a Redshift cluster by calling another SS * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. ## Documentation Links -* [AFSBP Redshift.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-1) +* [AWS FSBP Redshift.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-4) diff --git a/source/playbooks/SC/ssmdocs/descriptions/Redshift.6.md b/source/playbooks/SC/ssmdocs/descriptions/Redshift.6.md index dda71668..edaf4fbf 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/Redshift.6.md +++ b/source/playbooks/SC/ssmdocs/descriptions/Redshift.6.md @@ -9,4 +9,4 @@ This document enables automatic version upgrade on a Redshift cluster by calling * RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. 
## Documentation Links -* [AFSBP Redshift.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-6) +* [AWS FSBP Redshift.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-6) diff --git a/source/playbooks/SC/ssmdocs/descriptions/S3.1.md b/source/playbooks/SC/ssmdocs/descriptions/S3.1.md index a992932f..1cafdede 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/S3.1.md +++ b/source/playbooks/SC/ssmdocs/descriptions/S3.1.md @@ -11,4 +11,4 @@ This document blocks public access to all buckets by default at the account leve * Remediation.Output ## Documentation Links -* [AFSBP v1.0.0 S3.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-1) +* [AWS FSBP v1.0.0 S3.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-1) diff --git a/source/playbooks/SC/ssmdocs/descriptions/S3.11.md b/source/playbooks/SC/ssmdocs/descriptions/S3.11.md new file mode 100644 index 00000000..4612c4ee --- /dev/null +++ b/source/playbooks/SC/ssmdocs/descriptions/S3.11.md @@ -0,0 +1,16 @@ +### Document Name - ASR-AFSBP_1.0.0_S3.11 + +## What does this document do? +This document configures event notification to a S3 bucket. + +## Input Parameters +* AccountId: (Required) Account ID of the account for the finding +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +* BucketName: (Required) Name of bucket that event notifications will be triggered on. +* TopicName: (Required) The name of the SNS topic to create and configure for notifications. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 S3.11](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-11) diff --git a/source/playbooks/SC/ssmdocs/descriptions/S3.13.md b/source/playbooks/SC/ssmdocs/descriptions/S3.13.md new file mode 100644 index 00000000..97bf1daa --- /dev/null +++ b/source/playbooks/SC/ssmdocs/descriptions/S3.13.md @@ -0,0 +1,15 @@ +### Document Name - ASR-AFSBP_1.0.0_S3.13 + +## What does this document do? +This document sets an example lifecycle policy that transfers objects greater than 10 GB to S3 Intelligent Tiering after 90 days. +It is recommended to set lifecycle policies appropriate for the objects stored in your S3 bucket. + +## Input Parameters +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +* BucketName: (Required) Name of the S3 bucket. + +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 S3.13](https://docs.aws.amazon.com/securityhub/latest/userguide/s3-controls.html#s3-13) diff --git a/source/playbooks/SC/ssmdocs/descriptions/S3.2.md b/source/playbooks/SC/ssmdocs/descriptions/S3.2.md index 1f84b2f5..fe724e2d 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/S3.2.md +++ b/source/playbooks/SC/ssmdocs/descriptions/S3.2.md @@ -11,6 +11,6 @@ This document blocks all public access to an S3 bucket. 
* Remediation.Output ## Documentation Links -* [AFSBP v1.0.0 S3.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-2) -* [AFSBP v1.0.0 S3.3](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-3) -* [AFSBP v1.0.0 S3.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-8) +* [AWS FSBP v1.0.0 S3.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-2) +* [AWS FSBP v1.0.0 S3.3](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-3) +* [AWS FSBP v1.0.0 S3.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-8) diff --git a/source/playbooks/SC/ssmdocs/descriptions/S3.4.md b/source/playbooks/SC/ssmdocs/descriptions/S3.4.md index dbe9c040..c32c52af 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/S3.4.md +++ b/source/playbooks/SC/ssmdocs/descriptions/S3.4.md @@ -11,4 +11,4 @@ This document enables AES-256 as the default encryption for an S3 bucket. * Remediation.Output ## Documentation Links -* [AFSBP v1.0.0 S3.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-4) +* [AWS FSBP v1.0.0 S3.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-4) diff --git a/source/playbooks/SC/ssmdocs/descriptions/S3.5.md b/source/playbooks/SC/ssmdocs/descriptions/S3.5.md index 95642cf4..ddfa3d40 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/S3.5.md +++ b/source/playbooks/SC/ssmdocs/descriptions/S3.5.md @@ -11,4 +11,4 @@ This document adds a bucket policy to restrict internet access to https only. 
* Remediation.Output ## Documentation Links -* [AFSBP v1.0.0 S3.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-5) +* [AWS FSBP v1.0.0 S3.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-5) diff --git a/source/playbooks/SC/ssmdocs/descriptions/S3.6.md b/source/playbooks/SC/ssmdocs/descriptions/S3.6.md index 5beb62bb..2a4a66a9 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/S3.6.md +++ b/source/playbooks/SC/ssmdocs/descriptions/S3.6.md @@ -11,4 +11,4 @@ This document restricts cross-account access to a bucket in the local account. * Remediation.Output ## Documentation Links -* [AFSBP v1.0.0 S3.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-6) +* [AWS FSBP v1.0.0 S3.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-6) diff --git a/source/playbooks/SC/ssmdocs/descriptions/SNS.1.md b/source/playbooks/SC/ssmdocs/descriptions/SNS.1.md index d996fe98..f44b8716 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/SNS.1.md +++ b/source/playbooks/SC/ssmdocs/descriptions/SNS.1.md @@ -11,4 +11,4 @@ * Remediation.Output ## Documentation Links - * [AFSBP v1.0.0 SNS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-sns-1) \ No newline at end of file + * [AWS FSBP v1.0.0 SNS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-sns-1) \ No newline at end of file diff --git a/source/playbooks/SC/ssmdocs/descriptions/SNS.2.md b/source/playbooks/SC/ssmdocs/descriptions/SNS.2.md index 4ba3e8ad..67f84fab 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/SNS.2.md +++ b/source/playbooks/SC/ssmdocs/descriptions/SNS.2.md @@ -11,4 +11,4 @@ * Remediation.Output ## Documentation Links - * [AFSBP v1.0.0 
SNS.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-sns-2) \ No newline at end of file + * [AWS FSBP v1.0.0 SNS.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-sns-2) \ No newline at end of file diff --git a/source/playbooks/SC/ssmdocs/descriptions/SQS.1.md b/source/playbooks/SC/ssmdocs/descriptions/SQS.1.md index 21e83c15..83aa22ba 100644 --- a/source/playbooks/SC/ssmdocs/descriptions/SQS.1.md +++ b/source/playbooks/SC/ssmdocs/descriptions/SQS.1.md @@ -11,4 +11,4 @@ This document enables encryption at rest using AWS KMS for SQS Queues. * Remediation.Output ## Documentation Links -* [AFSBP v1.0.0 SQS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-sqs-1) +* [AWS FSBP v1.0.0 SQS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-sqs-1) diff --git a/source/playbooks/SC/ssmdocs/descriptions/SSM.4.md b/source/playbooks/SC/ssmdocs/descriptions/SSM.4.md new file mode 100644 index 00000000..c06d34cb --- /dev/null +++ b/source/playbooks/SC/ssmdocs/descriptions/SSM.4.md @@ -0,0 +1,14 @@ +### Document Name - ASR-AFSBP_1.0.0_SSM.4 + +## What does this document do? +This document modifies SSM document permissions to prevent cross-account public access. + +## Input Parameters +* DocumentArn: (Required) SSM Document ARN that will be changed. +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 SSM.4](https://docs.aws.amazon.com/securityhub/latest/userguide/ssm-controls.html#ssm-4) \ No newline at end of file diff --git a/source/playbooks/SC/ssmdocs/descriptions/SecretsManager.1.md b/source/playbooks/SC/ssmdocs/descriptions/SecretsManager.1.md new file mode 100644 index 00000000..4dc9b2a2 --- /dev/null +++ b/source/playbooks/SC/ssmdocs/descriptions/SecretsManager.1.md @@ -0,0 +1,14 @@ +### Document Name - ASR-AFSBP_1.0.0_SecretsManager.1 + +## What does this document do? +This document enables automatic rotation on a Secrets Manager secret if a Lambda function is already associated with it. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 SecretsManager.1](https://docs.aws.amazon.com/securityhub/latest/userguide/secretsmanager-controls.html#secretsmanager-1) \ No newline at end of file diff --git a/source/playbooks/SC/ssmdocs/descriptions/SecretsManager.3.md b/source/playbooks/SC/ssmdocs/descriptions/SecretsManager.3.md new file mode 100644 index 00000000..3e84babc --- /dev/null +++ b/source/playbooks/SC/ssmdocs/descriptions/SecretsManager.3.md @@ -0,0 +1,15 @@ +### Document Name - ASR-AFSBP_1.0.0_SecretsManager.3 + +## What does this document do? +This document deletes a secret that has been unused for the number of days specified in the unusedForDays parameter (Default: 90 days). + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +* SecretARN: (Required) The ARN of the Secrets Manager secret. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 SecretsManager.3](https://docs.aws.amazon.com/securityhub/latest/userguide/secretsmanager-controls.html#secretsmanager-3) \ No newline at end of file diff --git a/source/playbooks/SC/ssmdocs/descriptions/SecretsManager.4.md b/source/playbooks/SC/ssmdocs/descriptions/SecretsManager.4.md new file mode 100644 index 00000000..2187c11c --- /dev/null +++ b/source/playbooks/SC/ssmdocs/descriptions/SecretsManager.4.md @@ -0,0 +1,14 @@ +### Document Name - ASR-AFSBP_1.0.0_SecretsManager.4 + +## What does this document do? +This document rotates a secret and sets its rotation period to 90 days. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 SecretsManager.4](https://docs.aws.amazon.com/securityhub/latest/userguide/secretsmanager-controls.html#secretsmanager-4) \ No newline at end of file diff --git a/source/playbooks/SC/ssmdocs/scripts/check_for_s3_bucket_name.py b/source/playbooks/SC/ssmdocs/scripts/check_for_s3_bucket_name.py index b5ffa773..670c7b00 100644 --- a/source/playbooks/SC/ssmdocs/scripts/check_for_s3_bucket_name.py +++ b/source/playbooks/SC/ssmdocs/scripts/check_for_s3_bucket_name.py @@ -3,52 +3,51 @@ import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' 
+ retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def check_for_s3_bucket_name(_, __): try: ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) s3_bucket_name_for_audit_logging = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/afsbp/1.0.0/REDSHIFT.4/S3BucketNameForAuditLogging' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/afsbp/1.0.0/REDSHIFT.4/S3BucketNameForAuditLogging" + )["Parameter"].get("Value", "unknown") except Exception: - return { - "s3_bucket_name_for_redshift_audit_logging": "NOT_AVAILABLE" - } + return {"s3_bucket_name_for_redshift_audit_logging": "NOT_AVAILABLE"} return { "s3_bucket_name_for_redshift_audit_logging": s3_bucket_name_for_audit_logging } diff --git a/source/playbooks/SC/ssmdocs/scripts/test/test_parse_event.py b/source/playbooks/SC/ssmdocs/scripts/test/test_parse_event.py deleted file mode 100644 index 0307cdb6..00000000 --- a/source/playbooks/SC/ssmdocs/scripts/test/test_parse_event.py +++ /dev/null @@ -1,177 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
-# SPDX-License-Identifier: Apache-2.0 -import pytest - -from newplaybook_parse_input import parse_event -def event(): - return { - 'expected_control_id': '2.3', - 'parse_id_pattern': '^arn:(?:aws|aws-cn|aws-us-gov):s3:::([A-Za-z0-9.-]{3,63})$', - 'Finding': { - "ProductArn": "arn:aws:securityhub:us-east-2::product/aws/securityhub", - "Types": [ - "Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" - ], - "Description": "Details: 2.3 Ensure the S3 bucket used to store CloudTrail logs is not publicly accessible", - "SchemaVersion": "2018-10-08", - "Compliance": { - "Status": "WARNING", - "StatusReasons": [ - { - "Description": "The finding is in a WARNING state, because the S3 Bucket associated with this rule is in a different region/account. This rule does not support cross-region/cross-account checks, so it is recommended to disable this control in this region/account and only run it in the region/account where the resource is located.", - "ReasonCode": "S3_BUCKET_CROSS_ACCOUNT_CROSS_REGION" - } - ] - }, - "GeneratorId": "arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/2.3", - "FirstObservedAt": "2020-05-20T05:02:44.203Z", - "CreatedAt": "2020-05-20T05:02:44.203Z", - "RecordState": "ACTIVE", - "Title": "2.3 Ensure the S3 bucket used to store CloudTrail logs is not publicly accessible", - "Workflow": { - "Status": "NEW" - }, - "LastObservedAt": "2020-06-17T13:01:35.884Z", - "Severity": { - "Normalized": 90, - "Label": "CRITICAL", - "Product": 90, - "Original": "CRITICAL" - }, - "UpdatedAt": "2020-06-17T13:01:25.561Z", - "WorkflowState": "NEW", - "ProductFields": { - "StandardsGuideArn": "arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0", - "StandardsGuideSubscriptionArn": "arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0", - "RuleId": "2.3", - "RecommendationUrl": 
"https://docs.aws.amazon.com/console/securityhub/standards-cis-2.3/remediation", - "RelatedAWSResources:0/name": "securityhub-s3-bucket-public-read-prohibited-4414615a", - "RelatedAWSResources:0/type": "AWS::Config::ConfigRule", - "RelatedAWSResources:1/name": "securityhub-s3-bucket-public-write-prohibited-f104fcda", - "RelatedAWSResources:1/type": "AWS::Config::ConfigRule", - "StandardsControlArn": "arn:aws:securityhub:us-east-2:111111111111:control/cis-aws-foundations-benchmark/v/1.2.0/2.3", - "aws/securityhub/SeverityLabel": "CRITICAL", - "aws/securityhub/ProductName": "Security Hub", - "aws/securityhub/CompanyName": "AWS", - "aws/securityhub/annotation": "The finding is in a WARNING state, because the S3 Bucket associated with this rule is in a different region/account. This rule does not support cross-region/cross-account checks, so it is recommended to disable this control in this region/account and only run it in the region/account where the resource is located.", - "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-2::product/aws/securityhub/arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/2.3/finding/f51c716c-b33c-4949-b748-2ffd22bdceec" - }, - "AwsAccountId": "111111111111", - "Id": "arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/2.3/finding/f51c716c-b33c-4949-b748-2ffd22bdceec", - "Remediation": { - "Recommendation": { - "Text": "For directions on how to fix this issue, please consult the AWS Security Hub CIS documentation.", - "Url": "https://docs.aws.amazon.com/console/securityhub/standards-cis-2.3/remediation" - } - }, - "Resources": [ - { - "Partition": "aws", - "Type": "AwsS3Bucket", - "Region": "us-east-2", - "Id": "arn:aws:s3:::cloudtrail-awslogs-111111111111-kjfskljdfl" - } - ] - } - } - -def expected(): - return { - "account_id": '111111111111', - "resource_id": 'cloudtrail-awslogs-111111111111-kjfskljdfl', - "finding_id": 
'arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/2.3/finding/f51c716c-b33c-4949-b748-2ffd22bdceec', - "product_arn": 'arn:aws:securityhub:us-east-2::product/aws/securityhub', - "control_id": '2.3', - "object": { - "Type": 'AwsS3Bucket', - "Id": 'cloudtrail-awslogs-111111111111-kjfskljdfl', - "OutputKey": 'Remediation.Output' - }, - "matches": [ "cloudtrail-awslogs-111111111111-kjfskljdfl" ] - } - -def test_parse_event(): - parsed_event = parse_event(event(), {}) - assert parsed_event == expected() - -def test_parse_event_multimatch(): - expected_result = expected() - expected_result['matches'] = [ - "aws", - "cloudtrail-awslogs-111111111111-kjfskljdfl" - ] - test_event = event() - test_event['resource_index'] = 2 - test_event['parse_id_pattern'] = '^arn:((?:aws|aws-cn|aws-us-gov)):s3:::([A-Za-z0-9.-]{3,63})$' - parsed_event = parse_event(test_event, {}) - assert parsed_event == expected_result - -def test_bad_finding_id(): - test_event = event() - test_event['Finding']['Id'] = "badvalue" - with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) - assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Finding Id is invalid: badvalue' - -def test_bad_control_id(): - test_event = event() - test_event['Finding']['Id'] = "arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0//finding/f51c716c-b33c-4949-b748-2ffd22bdceec" - with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) - assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Finding Id is invalid: arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0//finding/f51c716c-b33c-4949-b748-2ffd22bdceec - missing Control Id' - -def test_control_id_nomatch(): - test_event = event() - test_event['Finding']['Id'] = 
"arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/2.4/finding/f51c716c-b33c-4949-b748-2ffd22bdceec" - with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) - assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Control Id from input (2.4) does not match 2.3' - -def test_bad_account_id(): - test_event = event() - test_event['Finding']['AwsAccountId'] = "1234123412345" - with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) - assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: AwsAccountId is invalid: 1234123412345' - -def test_bad_productarn(): - test_event = event() - test_event['Finding']['ProductArn'] = "badvalue" - with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) - assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: ProductArn is invalid: badvalue' - -def test_bad_resource_match(): - test_event = event() - test_event['parse_id_pattern'] = '^arn:(?:aws|aws-cn|aws-us-gov):logs:::([A-Za-z0-9.-]{3,63})$' - with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) - assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Invalid resource Id arn:aws:s3:::cloudtrail-awslogs-111111111111-kjfskljdfl' - -def test_no_resource_pattern(): - test_event = event() - expected_result = expected() - - test_event['parse_id_pattern'] = '' - expected_result['resource_id'] = 'arn:aws:s3:::cloudtrail-awslogs-111111111111-kjfskljdfl' - expected_result['matches'] = [] - expected_result['object']['Id'] = expected_result['resource_id'] - parsed_event = parse_event(test_event, {}) - assert parsed_event == expected_result - -def test_no_resource_pattern_no_resource_id(): - test_event = event() - - test_event['parse_id_pattern'] = 
'' - test_event['Finding']['Resources'][0]['Id'] = '' - - with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) - assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Resource Id is missing from the finding json Resources (Id)' diff --git a/source/playbooks/SC/test/__snapshots__/security_controls_stack.test.ts.snap b/source/playbooks/SC/test/__snapshots__/security_controls_stack.test.ts.snap index 197f4cd2..5fc39710 100644 --- a/source/playbooks/SC/test/__snapshots__/security_controls_stack.test.ts.snap +++ b/source/playbooks/SC/test/__snapshots__/security_controls_stack.test.ts.snap @@ -357,6 +357,22 @@ exports[`member stack 1`] = ` "Available", ], }, + "ControlRunbooksEnableCloudFront12Condition59835E00": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableCloudFront12B883E8E5", + }, + "Available", + ], + }, + "ControlRunbooksEnableCloudFront1ConditionD78B5553": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableCloudFront1A6026987", + }, + "Available", + ], + }, "ControlRunbooksEnableCloudTrail1ConditionB7EBAA86": { "Fn::Equals": [ { @@ -421,6 +437,14 @@ exports[`member stack 1`] = ` "Available", ], }, + "ControlRunbooksEnableCodeBuild5Condition5FF93A0A": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableCodeBuild5144FBB6F", + }, + "Available", + ], + }, "ControlRunbooksEnableConfig1Condition8CEB8627": { "Fn::Equals": [ { @@ -445,6 +469,22 @@ exports[`member stack 1`] = ` "Available", ], }, + "ControlRunbooksEnableEC218Condition903B1C90": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableEC21822A124F1", + }, + "Available", + ], + }, + "ControlRunbooksEnableEC219Condition2421DE99": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableEC21919C72DDA", + }, + "Available", + ], + }, "ControlRunbooksEnableEC21ConditionD4F1277B": { "Fn::Equals": [ { @@ -453,6 +493,14 @@ exports[`member stack 1`] = ` "Available", ], }, + "ControlRunbooksEnableEC223Condition795CB580": { + 
"Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableEC223E02B5464", + }, + "Available", + ], + }, "ControlRunbooksEnableEC22ConditionB9E0D42E": { "Fn::Equals": [ { @@ -461,6 +509,14 @@ exports[`member stack 1`] = ` "Available", ], }, + "ControlRunbooksEnableEC24Condition72408A1B": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableEC2448A9BAD2", + }, + "Available", + ], + }, "ControlRunbooksEnableEC26ConditionF1F880B0": { "Fn::Equals": [ { @@ -477,6 +533,30 @@ exports[`member stack 1`] = ` "Available", ], }, + "ControlRunbooksEnableEC28Condition4C4640B8": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableEC287AE93AB2", + }, + "Available", + ], + }, + "ControlRunbooksEnableECR1Condition70BCAF70": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableECR1CC254C91", + }, + "Available", + ], + }, + "ControlRunbooksEnableGuardDuty1Condition97849740": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableGuardDuty1139BC2DA", + }, + "Available", + ], + }, "ControlRunbooksEnableIAM18ConditionC6288150": { "Fn::Equals": [ { @@ -637,6 +717,22 @@ exports[`member stack 1`] = ` "Available", ], }, + "ControlRunbooksEnableS311Condition6AA79443": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableS3118EE66AFD", + }, + "Available", + ], + }, + "ControlRunbooksEnableS313ConditionA95162A4": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableS3134CFE501B", + }, + "Available", + ], + }, "ControlRunbooksEnableS31Condition25C33B3F": { "Fn::Equals": [ { @@ -701,6 +797,38 @@ exports[`member stack 1`] = ` "Available", ], }, + "ControlRunbooksEnableSSM4ConditionD47FCFB5": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableSSM47E37D561", + }, + "Available", + ], + }, + "ControlRunbooksEnableSecretsManager1ConditionCE635AAF": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableSecretsManager10CFF911B", + }, + "Available", + ], + }, + "ControlRunbooksEnableSecretsManager3Condition04E1FFBB": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableSecretsManager3EFC137EE", 
+ }, + "Available", + ], + }, + "ControlRunbooksEnableSecretsManager4ConditionCE71F44A": { + "Fn::Equals": [ + { + "Ref": "ControlRunbooksEnableSecretsManager4B15D8607", + }, + "Available", + ], + }, }, "Description": "test;", "Parameters": { @@ -722,6 +850,24 @@ exports[`member stack 1`] = ` "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control CloudFormation.1 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", "Type": "String", }, + "ControlRunbooksEnableCloudFront12B883E8E5": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control CloudFront.12 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableCloudFront1A6026987": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control CloudFront.1 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, "ControlRunbooksEnableCloudTrail1F0F927F7": { "AllowedValues": [ "Available", @@ -794,6 +940,15 @@ exports[`member stack 1`] = ` "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control CodeBuild.2 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", "Type": "String", }, + "ControlRunbooksEnableCodeBuild5144FBB6F": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control CodeBuild.5 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, "ControlRunbooksEnableConfig19F6E6FE3": { "AllowedValues": [ "Available", @@ -830,6 +985,33 @@ exports[`member stack 1`] = ` "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control EC2.15 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", "Type": "String", }, + "ControlRunbooksEnableEC21822A124F1": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control EC2.18 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableEC21919C72DDA": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control EC2.19 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableEC223E02B5464": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control EC2.23 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, "ControlRunbooksEnableEC22F9B66A60": { "AllowedValues": [ "Available", @@ -839,6 +1021,15 @@ exports[`member stack 1`] = ` "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control EC2.2 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", "Type": "String", }, + "ControlRunbooksEnableEC2448A9BAD2": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control EC2.4 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, "ControlRunbooksEnableEC265685AB83": { "AllowedValues": [ "Available", @@ -857,6 +1048,33 @@ exports[`member stack 1`] = ` "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control EC2.7 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", "Type": "String", }, + "ControlRunbooksEnableEC287AE93AB2": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control EC2.8 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableECR1CC254C91": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control ECR.1 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableGuardDuty1139BC2DA": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control GuardDuty.1 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, "ControlRunbooksEnableIAM18A4548D88": { "AllowedValues": [ "Available", @@ -1046,6 +1264,24 @@ exports[`member stack 1`] = ` "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control S3.1 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", "Type": "String", }, + "ControlRunbooksEnableS3118EE66AFD": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control S3.11 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableS3134CFE501B": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control S3.13 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, "ControlRunbooksEnableS325CF1F81C": { "AllowedValues": [ "Available", @@ -1109,6 +1345,42 @@ exports[`member stack 1`] = ` "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control SQS.1 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", "Type": "String", }, + "ControlRunbooksEnableSSM47E37D561": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control SSM.4 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableSecretsManager10CFF911B": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control SecretsManager.1 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableSecretsManager3EFC137EE": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control SecretsManager.3 in Security Hub Console Custom Actions. If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, + "ControlRunbooksEnableSecretsManager4B15D8607": { + "AllowedValues": [ + "Available", + "NOT Available", + ], + "Default": "Available", + "Description": "Enable/disable availability of remediation for pci-dss version 3.2.1 Control SecretsManager.4 in Security Hub Console Custom Actions. 
If NOT Available the remediation cannot be triggered from the Security Hub console in the Security Hub Admin account.", + "Type": "String", + }, "SecHubAdminAccount": { "AllowedPattern": "^\\d{12}$", "Description": "Admin account number", @@ -1142,7 +1414,7 @@ Default: 30 seconds * Remediation.Output ## Documentation Links -* [AFSBP AutoScaling.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-autoscaling-1) +* [AWS FSBP AutoScaling.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-autoscaling-1) ", "mainSteps": [ { @@ -1159,106 +1431,109 @@ Default: 30 seconds "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return 
solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id 
{identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -1268,101 +1543,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + 
"RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. 
- self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 
1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -1383,6 +1699,11 @@ def parse_event(event, _): "Selector": "$.Payload.object", "Type": "StringMap", }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, { "Name": "AutoScalingGroupName", "Selector": "$.Payload.resource_id", @@ -1501,7 +1822,7 @@ This document configures an SNS topic for notifications from a CloudFormation st * Remediation.Output ## Documentation Links -* [AFSBP v1.0.0 CloudFormation.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudformation-1) +* [AWS FSBP v1.0.0 CloudFormation.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudformation-1) ", "mainSteps": 
[ { @@ -1518,106 +1839,109 @@ This document configures an SNS topic for notifications from a CloudFormation st "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + 
retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = 
get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -1627,101 +1951,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + 
r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not 
re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not 
self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' -def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) +""" - if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ 
-1742,6 +2107,11 @@ def parse_event(event, _): "Selector": "$.Payload.object", "Type": "StringMap", }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, { "Name": "StackArn", "Selector": "$.Payload.resource_id", @@ -1839,26 +2209,28 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksCloudTrail1B15F1A13": { - "Condition": "ControlRunbooksEnableCloudTrail1ConditionB7EBAA86", + "ControlRunbooksCloudFront116F66FF8": { + "Condition": "ControlRunbooksEnableCloudFront1ConditionD78B5553", "DependsOn": [ "CreateWait0", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_CloudTrail.1 + "description": "### Document Name - ASR-AFSBP_1.0.0_CloudFront.1 + ## What does this document do? -Creates a multi-region trail with KMS encryption and enables CloudTrail -Note: this remediation will create a NEW trail. +This document configures a default root object to be returned when visiting a CloudFront distribution. ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +## Output Parameters +* Remediation.Output + ## Documentation Links -* [AFSBP CloudTrail.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-1) -", +* [AWS FSBP v1.0.0 CloudFront.1](https://docs.aws.amazon.com/securityhub/latest/userguide/cloudfront-controls.html#cloudfront-1)", "mainSteps": [ { "action": "aws:executeScript", @@ -1867,114 +2239,116 @@ Note: this remediation will create a NEW trail. 
"InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "CloudTrail.1", - "CloudTrail.3", + "CloudFront.1", ], - "parse_id_pattern": "", + "parse_id_pattern": "^(arn:(?:aws|aws-us-gov|aws-cn):cloudfront::\\d{12}:distribution\\/([A-Z0-9]+))$", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): 
boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + 
"security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -1984,101 +2358,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): 
check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: 
{self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match 
{str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": 
finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -2099,15 +2514,25 @@ def parse_event(event, _): "Selector": "$.Payload.object", "Type": "StringMap", }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "CloudFrontDistribution", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, ], }, { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-CreateCloudTrailMultiRegionTrail", + "DocumentName": "ASR-EnableCloudFrontDefaultRootObject", "RuntimeParameters": { - "AWSPartition": "{{ global:AWS_PARTITION }}", "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "CloudFrontDistribution": "{{ ParseInput.CloudFrontDistribution }}", }, }, "name": "Remediation", @@ -2123,8 +2548,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Multi-region, encrypted AWS CloudTrail successfully created", - "UpdatedBy": "ASR-PCI_3.2.1_CloudTrail.1", + "Text": "Configured default root object for CloudFront distribution", + "UpdatedBy": "ASR-PCI_3.2.1_CloudFront.1", }, "Service": "securityhub", "Workflow": { @@ -2146,12 +2571,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the CloudTrail.1 finding", + "description": "The input from the Orchestrator Step function for the CloudFront.1 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-CreateCloudTrailMultiRegionTrail", + "default": "SO0111-EnableCloudFrontDefaultRootObject", "type": "String", }, }, @@ -2159,7 +2584,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_CloudTrail.1", + "Name": "ASR-PCI_3.2.1_CloudFront.1", "Tags": [ { "Key": "CdkGenerated", @@ -2170,17 +2595,17 @@ def parse_event(event, _): }, "Type": 
"AWS::SSM::Document", }, - "ControlRunbooksCloudTrail2979D0B5D": { - "Condition": "ControlRunbooksEnableCloudTrail2ConditionC182A10F", + "ControlRunbooksCloudFront1283E53E96": { + "Condition": "ControlRunbooksEnableCloudFront12Condition59835E00", "DependsOn": [ "CreateWait0", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_CloudTrail.2 + "description": "### Document Name - ASR-NIST_800_53_CloudFront.12 ## What does this document do? -This document enables SSE KMS encryption for log files using the ASR remediation KMS CMK +This document enables sets the origin domain to a non-existent value to prevent a potential malicious takeover. ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. @@ -2188,7 +2613,7 @@ This document enables SSE KMS encryption for log files using the ASR remediation * Remediation.Output - Output from the remediation ## Documentation Links -* [AFSBP CloudTrail.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-2) +* [NIST CloudFront.1](https://docs.aws.amazon.com/securityhub/latest/userguide/cloudfront-controls.html#cloudfront-12) ", "mainSteps": [ { @@ -2198,113 +2623,116 @@ This document enables SSE KMS encryption for log files using the ASR remediation "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "CloudTrail.2", + "CloudFront.12", ], - "parse_id_pattern": "", + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):cloudfront::[0-9]{12}:distribution\\/([A-Z0-9]*)$", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -2314,101 +2742,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -2430,13 +2899,13 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "TrailArn", - "Selector": "$.Payload.resource_id", - "Type": "String", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", }, { - "Name": "RemediationRegion", - "Selector": 
"$.Payload.resource_region", + "Name": "DistributionId", + "Selector": "$.Payload.resource_id", "Type": "String", }, ], @@ -2444,12 +2913,10 @@ def parse_event(event, _): { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-EnableCloudTrailEncryption", + "DocumentName": "ASR-SetCloudFrontOriginDomain", "RuntimeParameters": { "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "KMSKeyArn": "{{ KMSKeyArn }}", - "TrailArn": "{{ ParseInput.TrailArn }}", - "TrailRegion": "{{ ParseInput.RemediationRegion }}", + "DistributionId": "{{ ParseInput.DistributionId }}", }, }, "name": "Remediation", @@ -2465,8 +2932,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Encryption enabled on CloudTrail", - "UpdatedBy": "ASR-PCI_3.2.1_CloudTrail.2", + "Text": "Set CloudFront origin domain to safe value.", + "UpdatedBy": "ASR-PCI_3.2.1_CloudFront.12", }, "Service": "securityhub", "Workflow": { @@ -2488,17 +2955,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the CloudTrail.2 finding", + "description": "The input from the Orchestrator Step function for the CloudFront.12 finding", "type": "StringMap", }, - "KMSKeyArn": { - "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:(?:(?:alias\\/[A-Za-z0-9/_-])|(?:key\\/(?:[0-9a-fA-F]{8}-(?:[0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12})))$", - "default": "{{ssm:/Solutions/SO0111/CMK_REMEDIATION_ARN}}", - "type": "String", - }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-EnableCloudTrailEncryption", + "default": "SO0111-SetCloudFrontOriginDomain", "type": "String", }, }, @@ -2506,7 +2968,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_CloudTrail.2", + "Name": "ASR-PCI_3.2.1_CloudFront.12", "Tags": [ { "Key": "CdkGenerated", @@ 
-2517,28 +2979,25 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksCloudTrail4057F669F": { - "Condition": "ControlRunbooksEnableCloudTrail4Condition587734A2", + "ControlRunbooksCloudTrail1B15F1A13": { + "Condition": "ControlRunbooksEnableCloudTrail1ConditionB7EBAA86", "DependsOn": [ "CreateWait0", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_CloudTrail.4 - + "description": "### Document Name - ASR-AFSBP_1.0.0_CloudTrail.1 ## What does this document do? -This document enables CloudTrail log file validation. +Creates a multi-region trail with KMS encryption and enables CloudTrail +Note: this remediation will create a NEW trail. ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. -## Output Parameters -* Remediation.Output - ## Documentation Links -* [AFSBP v1.0.0 CloudTrail.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-4) +* [AWS FSBP CloudTrail.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-1) ", "mainSteps": [ { @@ -2548,113 +3007,117 @@ This document enables CloudTrail log file validation. "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "CloudTrail.4", + "CloudTrail.1", + "CloudTrail.3", ], - "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):cloudtrail:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:trail\\/([A-Za-z0-9._-]{3,128})$", + "parse_id_pattern": "", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -2664,101 +3127,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -2780,41 +3284,20 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "TrailName", - "Selector": "$.Payload.resource_id", - "Type": "String", - }, - { - "Name": "RemediationAccount", - "Selector": "$.Payload.account_id", - "Type": "String", - }, - { - "Name": 
"RemediationRegion", - "Selector": "$.Payload.resource_region", - "Type": "String", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", }, ], }, { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-EnableCloudTrailLogFileValidation", + "DocumentName": "ASR-CreateCloudTrailMultiRegionTrail", "RuntimeParameters": { + "AWSPartition": "{{ global:AWS_PARTITION }}", "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "TrailName": "{{ ParseInput.TrailName }}", }, - "TargetLocations": [ - { - "Accounts": [ - "{{ ParseInput.RemediationAccount }}", - ], - "ExecutionRoleName": "{{ RemediationRoleName }}", - "Regions": [ - "{{ ParseInput.RemediationRegion }}", - ], - }, - ], }, "name": "Remediation", }, @@ -2829,8 +3312,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Enabled CloudTrail log file validation.", - "UpdatedBy": "ASR-PCI_3.2.1_CloudTrail.4", + "Text": "Multi-region, encrypted AWS CloudTrail successfully created", + "UpdatedBy": "ASR-PCI_3.2.1_CloudTrail.1", }, "Service": "securityhub", "Workflow": { @@ -2852,12 +3335,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the CloudTrail.4 finding", + "description": "The input from the Orchestrator Step function for the CloudTrail.1 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-EnableCloudTrailLogFileValidation", + "default": "SO0111-CreateCloudTrailMultiRegionTrail", "type": "String", }, }, @@ -2865,7 +3348,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_CloudTrail.4", + "Name": "ASR-PCI_3.2.1_CloudTrail.1", "Tags": [ { "Key": "CdkGenerated", @@ -2876,28 +3359,25 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksCloudTrail54F5ED8E4": { - 
"Condition": "ControlRunbooksEnableCloudTrail5Condition17B6B536", + "ControlRunbooksCloudTrail2979D0B5D": { + "Condition": "ControlRunbooksEnableCloudTrail2ConditionC182A10F", "DependsOn": [ "CreateWait1", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_CloudTrail.5 - + "description": "### Document Name - ASR-AFSBP_1.0.0_CloudTrail.2 ## What does this document do? -This document configures CloudTrail to log to CloudWatch Logs. - +This document enables SSE KMS encryption for log files using the ASR remediation KMS CMK ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. - ## Output Parameters -* Remediation.Output - Remediation results +* Remediation.Output - Output from the remediation ## Documentation Links -* [AFSBP v1.0.0 CloudTrail.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-5) +* [AWS FSBP CloudTrail.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-2) ", "mainSteps": [ { @@ -2907,113 +3387,116 @@ This document configures CloudTrail to log to CloudWatch Logs. "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "CloudTrail.5", + "CloudTrail.2", ], - "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):cloudtrail:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:trail\\/([A-Za-z0-9._-]{3,128})$", + "parse_id_pattern": "", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -3023,101 +3506,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -3139,7 +3663,12 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "TrailName", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "TrailArn", "Selector": "$.Payload.resource_id", "Type": "String", }, @@ -3158,12 
+3687,11 @@ def parse_event(event, _): { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-EnableCloudTrailToCloudWatchLogging", + "DocumentName": "ASR-EnableCloudTrailEncryption", "RuntimeParameters": { "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "CloudWatchLogsRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/SO0111-CloudTrailToCloudWatchLogs", - "LogGroupName": "CloudTrail/{{ ParseInput.TrailName }}", - "TrailName": "{{ ParseInput.TrailName }}", + "TrailArn": "{{ ParseInput.TrailArn }}", + "TrailRegion": "{{ ParseInput.RemediationRegion }}", }, "TargetLocations": [ { @@ -3190,8 +3718,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Configured CloudTrail logging to CloudWatch Logs Group CloudTrail/{{ ParseInput.TrailName }}", - "UpdatedBy": "ASR-PCI_3.2.1_CloudTrail.5", + "Text": "Encryption enabled on CloudTrail", + "UpdatedBy": "ASR-PCI_3.2.1_CloudTrail.2", }, "Service": "securityhub", "Workflow": { @@ -3213,12 +3741,17 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the CloudTrail.5 finding", + "description": "The input from the Orchestrator Step function for the CloudTrail.2 finding", "type": "StringMap", }, + "KMSKeyArn": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:(?:(?:alias\\/[A-Za-z0-9/_-])|(?:key\\/(?:[0-9a-fA-F]{8}-(?:[0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12})))$", + "default": "{{ssm:/Solutions/SO0111/CMK_REMEDIATION_ARN}}", + "type": "String", + }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-EnableCloudTrailToCloudWatchLogging", + "default": "SO0111-EnableCloudTrailEncryption", "type": "String", }, }, @@ -3226,7 +3759,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_CloudTrail.5", + "Name": 
"ASR-PCI_3.2.1_CloudTrail.2", "Tags": [ { "Key": "CdkGenerated", @@ -3237,18 +3770,18 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksCloudTrail6526C5643": { - "Condition": "ControlRunbooksEnableCloudTrail6Condition486CC2C3", + "ControlRunbooksCloudTrail4057F669F": { + "Condition": "ControlRunbooksEnableCloudTrail4Condition587734A2", "DependsOn": [ "CreateWait1", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-CIS_1.2.0_2.3 + "description": "### Document Name - ASR-AFSBP_1.0.0_CloudTrail.4 ## What does this document do? -This document blocks public access to the CloudTrail S3 bucket. +This document enables CloudTrail log file validation. ## Input Parameters * Finding: (Required) Security Hub finding details JSON @@ -3258,7 +3791,7 @@ This document blocks public access to the CloudTrail S3 bucket. * Remediation.Output ## Documentation Links -* [CIS v1.2.0 2.3](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-2.3) +* [AWS FSBP v1.0.0 CloudTrail.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-4) ", "mainSteps": [ { @@ -3268,113 +3801,116 @@ This document blocks public access to the CloudTrail S3 bucket. "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "CloudTrail.6", + "CloudTrail.4", ], - "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):s3:::([A-Za-z0-9.-]{3,63})$", + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):cloudtrail:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:trail\\/([A-Za-z0-9._-]{3,128})$", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -3384,101 +3920,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -3500,24 +4077,46 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "BucketName", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "TrailName", "Selector": "$.Payload.resource_id", "Type": "String", }, + { + "Name": 
"RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, ], }, { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-ConfigureS3BucketPublicAccessBlock", + "DocumentName": "ASR-EnableCloudTrailLogFileValidation", "RuntimeParameters": { "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "BlockPublicAcls": true, - "BlockPublicPolicy": true, - "BucketName": "{{ ParseInput.BucketName }}", - "IgnorePublicAcls": true, - "RestrictPublicBuckets": true, + "TrailName": "{{ ParseInput.TrailName }}", }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], }, "name": "Remediation", }, @@ -3532,8 +4131,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Disabled public access to CloudTrail logs bucket.", - "UpdatedBy": "ASR-PCI_3.2.1_CloudTrail.6", + "Text": "Enabled CloudTrail log file validation.", + "UpdatedBy": "ASR-PCI_3.2.1_CloudTrail.4", }, "Service": "securityhub", "Workflow": { @@ -3555,12 +4154,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the CloudTrail.6 finding", + "description": "The input from the Orchestrator Step function for the CloudTrail.4 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-ConfigureS3BucketPublicAccessBlock", + "default": "SO0111-EnableCloudTrailLogFileValidation", "type": "String", }, }, @@ -3568,7 +4167,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_CloudTrail.6", + "Name": "ASR-PCI_3.2.1_CloudTrail.4", "Tags": [ { "Key": "CdkGenerated", @@ 
-3579,18 +4178,18 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksCloudTrail7C6D85038": { - "Condition": "ControlRunbooksEnableCloudTrail7ConditionA4FF88B2", + "ControlRunbooksCloudTrail54F5ED8E4": { + "Condition": "ControlRunbooksEnableCloudTrail5Condition17B6B536", "DependsOn": [ "CreateWait1", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-CIS_1.2.0_2.6 + "description": "### Document Name - ASR-AFSBP_1.0.0_CloudTrail.5 ## What does this document do? -Configures access logging for a CloudTrail S3 bucket. +This document configures CloudTrail to log to CloudWatch Logs. ## Input Parameters * Finding: (Required) Security Hub finding details JSON @@ -3600,7 +4199,7 @@ Configures access logging for a CloudTrail S3 bucket. * Remediation.Output - Remediation results ## Documentation Links -* [CIS v1.2.0 2.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-2.6) +* [AWS FSBP v1.0.0 CloudTrail.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-cloudtrail-5) ", "mainSteps": [ { @@ -3610,113 +4209,116 @@ Configures access logging for a CloudTrail S3 bucket. "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "CloudTrail.7", + "CloudTrail.5", ], - "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):s3:::([A-Za-z0-9.-]{3,63})$", + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):cloudtrail:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:trail\\/([A-Za-z0-9._-]{3,128})$", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -3726,101 +4328,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -3842,46 +4485,48 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "BucketName", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "TrailName", "Selector": "$.Payload.resource_id", "Type": "String", }, - ], - }, - { - 
"action": "aws:executeAutomation", - "inputs": { - "DocumentName": "ASR-CreateAccessLoggingBucket", - "RuntimeParameters": { - "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/SO0111-CreateAccessLoggingBucket", - "BucketName": "so0111-cloudtrailaccesslogs-{{ global:ACCOUNT_ID }}-{{ global:REGION }}", + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", }, - }, - "name": "CreateAccessLoggingBucket", + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], }, { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "AWS-ConfigureS3BucketLogging", + "DocumentName": "ASR-EnableCloudTrailToCloudWatchLogging", "RuntimeParameters": { "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "BucketName": "{{ ParseInput.BucketName }}", - "GrantedPermission": [ - "READ", - ], - "GranteeType": [ - "Group", - ], - "GranteeUri": [ - "http://acs.amazonaws.com/groups/s3/LogDelivery", - ], - "TargetBucket": [ - "so0111-cloudtrailaccesslogs-{{ global:ACCOUNT_ID }}-{{ global:REGION }}", - ], - "TargetPrefix": [ - "{{ ParseInput.BucketName }}", - ], + "CloudWatchLogsRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/SO0111-CloudTrailToCloudWatchLogs", + "LogGroupName": "CloudTrail/{{ ParseInput.TrailName }}", + "TrailName": "{{ ParseInput.TrailName }}", }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], }, "name": "Remediation", }, @@ -3896,8 +4541,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Created S3 bucket so0111-cloudtrailaccesslogs-{{ global:ACCOUNT_ID }}-{{ global:REGION }} for logging access to {{ ParseInput.BucketName }}", - "UpdatedBy": "ASR-PCI_3.2.1_CloudTrail.7", + "Text": 
"Configured CloudTrail logging to CloudWatch Logs Group CloudTrail/{{ ParseInput.TrailName }}", + "UpdatedBy": "ASR-PCI_3.2.1_CloudTrail.5", }, "Service": "securityhub", "Workflow": { @@ -3919,12 +4564,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the CloudTrail.7 finding", + "description": "The input from the Orchestrator Step function for the CloudTrail.5 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-ConfigureS3BucketLogging", + "default": "SO0111-EnableCloudTrailToCloudWatchLogging", "type": "String", }, }, @@ -3932,7 +4577,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_CloudTrail.7", + "Name": "ASR-PCI_3.2.1_CloudTrail.5", "Tags": [ { "Key": "CdkGenerated", @@ -3943,57 +4588,28 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksCloudWatch1A05F543A": { - "Condition": "ControlRunbooksEnableCloudWatch1ConditionAB0DF2E5", + "ControlRunbooksCloudTrail6526C5643": { + "Condition": "ControlRunbooksEnableCloudTrail6Condition486CC2C3", "DependsOn": [ "CreateWait1", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-CIS_1.2.0_3.x + "description": "### Document Name - ASR-CIS_1.2.0_2.3 ## What does this document do? 
-Remediates the following CIS findings: - -3.1 - Creates a log metric filter and alarm for unauthorized API calls -3.2 - Creates a log metric filter and alarm for AWS Management Console sign-in without MFA -3.3 - Creates a log metric filter and alarm for usage of "root" account -3.4 - Creates a log metric filter and alarm for for IAM policy changes -3.5 - Creates a log metric filter and alarm for CloudTrail configuration changes -3.6 - Creates a log metric filter and alarm for AWS Management Console authentication failures -3.7 - Creates a log metric filter and alarm for disabling or scheduled deletion of customer created CMKs -3.8 - Creates a log metric filter and alarm for S3 bucket policy changes -3.9 - Creates a log metric filter and alarm for AWS Config configuration changes -3.10 - Creates a log metric filter and alarm for security group changes -3.11 - Creates a log metric filter and alarm for changes to Network Access Control Lists (NACL) -3.12 - Creates a log metric filter and alarm for changes to network gateways -3.13 - Creates a log metric filter and alarm for route table changes -3.14 - Creates a log metric filter and alarm for VPC changes - +This document blocks public access to the CloudTrail S3 bucket. ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. ## Output Parameters -* Remediation.Output - Output of remediation runbook. 
+* Remediation.Output ## Documentation Links -[CIS v1.2.0 3.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.1) -[CIS v1.2.0 3.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.2) -[CIS v1.2.0 3.3](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.3) -[CIS v1.2.0 3.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.4) -[CIS v1.2.0 3.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.5) -[CIS v1.2.0 3.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.6) -[CIS v1.2.0 3.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.7) -[CIS v1.2.0 3.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.8) -[CIS v1.2.0 3.9](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.9) -[CIS v1.2.0 3.10](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.10) -[CIS v1.2.0 3.11](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.11) -[CIS v1.2.0 3.12](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.12) -[CIS v1.2.0 3.13](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.13) -[CIS v1.2.0 3.14](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.14) +* [CIS v1.2.0 
2.3](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-2.3) ", "mainSteps": [ { @@ -4003,126 +4619,116 @@ Remediates the following CIS findings: "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "CloudWatch.1", - "CloudWatch.2", - "CloudWatch.3", - "CloudWatch.4", - "CloudWatch.5", - "CloudWatch.6", - "CloudWatch.7", - "CloudWatch.8", - "CloudWatch.9", - "CloudWatch.10", - "CloudWatch.11", - "CloudWatch.12", - "CloudWatch.13", - "CloudWatch.14", + "CloudTrail.6", ], - "parse_id_pattern": "", + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):s3:::([A-Za-z0-9.-]{3,63})$", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def 
get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def 
_get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -4132,101 +4738,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in 
self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: 
{self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. 
- self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 
1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -4248,283 +4895,6755 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "ControlId", - "Selector": "$.Payload.control_id", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "BucketName", + "Selector": "$.Payload.resource_id", "Type": "String", }, ], }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-ConfigureS3BucketPublicAccessBlock", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "BlockPublicAcls": true, + "BlockPublicPolicy": true, + "BucketName": "{{ ParseInput.BucketName }}", + "IgnorePublicAcls": true, + "RestrictPublicBuckets": true, 
+ }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Disabled public access to CloudTrail logs bucket.", + "UpdatedBy": "ASR-PCI_3.2.1_CloudTrail.6", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the CloudTrail.6 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-ConfigureS3BucketPublicAccessBlock", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-PCI_3.2.1_CloudTrail.6", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksCloudTrail7C6D85038": { + "Condition": "ControlRunbooksEnableCloudTrail7ConditionA4FF88B2", + "DependsOn": [ + "CreateWait1", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-CIS_1.2.0_2.6 + +## What does this document do? +Configures access logging for a CloudTrail S3 bucket. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output - Remediation results + +## Documentation Links +* [CIS v1.2.0 2.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-2.6) +", + "mainSteps": [ { "action": "aws:executeScript", "inputs": { - "Handler": "verify", + "Handler": "parse_event", "InputPayload": { - "ControlId": "{{ ParseInput.ControlId }}", - "StandardLongName": "pci-dss", - "StandardVersion": "3.2.1", + "Finding": "{{ Finding }}", + "expected_control_id": [ + "CloudTrail.7", + "S3.9", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):s3:::([A-Za-z0-9.-]{3,63})$", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any -unauthorizedAPICallsFilter = { - "filter_name": "UnauthorizedAPICalls", - "filter_pattern": '{($.errorCode="*UnauthorizedOperation") || ($.errorCode="AccessDenied*")}', - "metric_name": "UnauthorizedAPICalls", - "metric_value": 1, - "alarm_name": "UnauthorizedAPICalls", - "alarm_desc": "Alarm for UnauthorizedAPICalls > 0", - "alarm_threshold": 1 -} +import boto3 +from botocore.config import Config -consoleSignInWithoutMFAFilter = { - "filter_name": "ConsoleSigninWithoutMFA", - "filter_pattern": '{($.eventName="ConsoleLogin") && ($.additionalEventData.MFAUsed !="Yes")}', - "metric_name": "ConsoleSigninWithoutMFA", - "metric_value": 1, - "alarm_name": "ConsoleSigninWithoutMFA", - "alarm_desc": "Alarm for ConsoleSigninWithoutMFA > 0", - "alarm_threshold": 1 - } -rootAccountUsageFilter = { - "filter_name": "RootAccountUsage", - "filter_pattern": '{$.userIdentity.type="Root" && $.userIdentity.invokedBy NOT EXISTS && $.eventType !="AwsServiceEvent"}', - "metric_name": "RootAccountUsage", - "metric_value": 1, - "alarm_name": "RootAccountUsage", - "alarm_desc": "Alarm for RootAccountUsage > 0", - "alarm_threshold": 1 -} +def 
connect_to_config(boto_config): + return boto3.client("config", config=boto_config) -iamPolicyChangesFilter = { - "filter_name": "IAMPolicyChanges", - "filter_pattern": '{($.eventName=DeleteGroupPolicy) || ($.eventName=DeleteRolePolicy) || ($.eventName=DeleteUserPolicy) || ($.eventName=PutGroupPolicy) || ($.eventName=PutRolePolicy) || ($.eventName=PutUserPolicy) || ($.eventName=CreatePolicy) || ($.eventName=DeletePolicy) || ($.eventName=CreatePolicyVersion) || ($.eventName=DeletePolicyVersion) || ($.eventName=AttachRolePolicy) || ($.eventName=DetachRolePolicy) || ($.eventName=AttachUserPolicy) || ($.eventName=DetachUserPolicy) || ($.eventName=AttachGroupPolicy) || ($.eventName=DetachGroupPolicy)}', - "metric_name": "IAMPolicyChanges", - "metric_value": 1, - "alarm_name": "IAMPolicyChanges", - "alarm_desc": "Alarm for IAMPolicyChanges > 0", - "alarm_threshold": 1 - } -cloudtrailChangesFilter = { - "filter_name": "CloudTrailChanges", - "filter_pattern": '{($.eventName=CreateTrail) || ($.eventName=UpdateTrail) || ($.eventName=DeleteTrail) || ($.eventName=StartLogging) || ($.eventName=StopLogging)}', - "metric_name": "CloudTrailChanges", - "metric_value": 1, - "alarm_name": "CloudTrailChanges", - "alarm_desc": "Alarm for CloudTrailChanges > 0", - "alarm_threshold": 1 -} +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) -consoleAuthenticationFailureFilter = { - "filter_name": "ConsoleAuthenticationFailure", - "filter_pattern": '{($.eventName=ConsoleLogin) && ($.errorMessage="Failed authentication")}', - "metric_name": "ConsoleAuthenticationFailure", - "metric_value": 1, - "alarm_name": "ConsoleAuthenticationFailure", - "alarm_desc": "Alarm for ConsoleAuthenticationFailure > 0", - "alarm_threshold": 1 - } -disableOrDeleteCMKFilter = { - "filter_name": "DisableOrDeleteCMK", - "filter_pattern": '{($.eventSource=kms.amazonaws.com) && (($.eventName=DisableKey) || ($.eventName=ScheduleKeyDeletion))}', - "metric_name": "DisableOrDeleteCMK", - 
"metric_value": 1, - "alarm_name": "DisableOrDeleteCMK", - "alarm_desc": "Alarm for DisableOrDeleteCMK > 0", - "alarm_threshold": 1 -} +def get_solution_id(): + return "SO0111" -s3BucketPolicyChangesFilter = { - "filter_name": "S3BucketPolicyChanges", - "filter_pattern": '{($.eventSource=s3.amazonaws.com) && (($.eventName=PutBucketAcl) || ($.eventName=PutBucketPolicy) || ($.eventName=PutBucketCors) || ($.eventName=PutBucketLifecycle) || ($.eventName=PutBucketReplication) || ($.eventName=DeleteBucketPolicy) || ($.eventName=DeleteBucketCors) || ($.eventName=DeleteBucketLifecycle) || ($.eventName=DeleteBucketReplication))}', - "metric_name": "S3BucketPolicyChanges", - "metric_value": 1, - "alarm_name": "S3BucketPolicyChanges", - "alarm_desc": "Alarm for S3BucketPolicyChanges > 0", - "alarm_threshold": 1 -} -awsConfigChangesFilter = { - "filter_name": "AWSConfigChanges", - "filter_pattern": '{($.eventSource=config.amazonaws.com) && (($.eventName=StopConfigurationRecorder) || ($.eventName=DeleteDeliveryChannel) || ($.eventName=PutDeliveryChannel) || ($.eventName=PutConfigurationRecorder))}', - "metric_name": "AWSConfigChanges", - "metric_value": 1, - "alarm_name": "AWSConfigChanges", - "alarm_desc": "Alarm for AWSConfigChanges > 0", - "alarm_threshold": 1 -} +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + 
+def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + 
self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + 
r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 
1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "BucketName", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-CreateAccessLoggingBucket", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/SO0111-CreateAccessLoggingBucket", + "BucketName": "so0111-cloudtrailaccesslogs-{{ global:ACCOUNT_ID }}-{{ global:REGION }}", + }, + }, + "name": "CreateAccessLoggingBucket", + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "AWS-ConfigureS3BucketLogging", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "BucketName": "{{ 
ParseInput.BucketName }}", + "GrantedPermission": [ + "READ", + ], + "GranteeType": [ + "Group", + ], + "GranteeUri": [ + "http://acs.amazonaws.com/groups/s3/LogDelivery", + ], + "TargetBucket": [ + "so0111-cloudtrailaccesslogs-{{ global:ACCOUNT_ID }}-{{ global:REGION }}", + ], + "TargetPrefix": [ + "{{ ParseInput.BucketName }}", + ], + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Created S3 bucket so0111-cloudtrailaccesslogs-{{ global:ACCOUNT_ID }}-{{ global:REGION }} for logging access to {{ ParseInput.BucketName }}", + "UpdatedBy": "ASR-PCI_3.2.1_CloudTrail.7", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the CloudTrail.7 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-ConfigureS3BucketLogging", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-PCI_3.2.1_CloudTrail.7", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksCloudWatch1A05F543A": { + "Condition": "ControlRunbooksEnableCloudWatch1ConditionAB0DF2E5", + "DependsOn": [ + "CreateWait2", + ], + "Properties": 
{ + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-CIS_1.2.0_3.x + +## What does this document do? +Remediates the following CIS findings: + +3.1 - Creates a log metric filter and alarm for unauthorized API calls +3.2 - Creates a log metric filter and alarm for AWS Management Console sign-in without MFA +3.3 - Creates a log metric filter and alarm for usage of "root" account +3.4 - Creates a log metric filter and alarm for for IAM policy changes +3.5 - Creates a log metric filter and alarm for CloudTrail configuration changes +3.6 - Creates a log metric filter and alarm for AWS Management Console authentication failures +3.7 - Creates a log metric filter and alarm for disabling or scheduled deletion of customer created CMKs +3.8 - Creates a log metric filter and alarm for S3 bucket policy changes +3.9 - Creates a log metric filter and alarm for AWS Config configuration changes +3.10 - Creates a log metric filter and alarm for security group changes +3.11 - Creates a log metric filter and alarm for changes to Network Access Control Lists (NACL) +3.12 - Creates a log metric filter and alarm for changes to network gateways +3.13 - Creates a log metric filter and alarm for route table changes +3.14 - Creates a log metric filter and alarm for VPC changes + + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output - Output of remediation runbook. 
+ +## Documentation Links +[CIS v1.2.0 3.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.1) +[CIS v1.2.0 3.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.2) +[CIS v1.2.0 3.3](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.3) +[CIS v1.2.0 3.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.4) +[CIS v1.2.0 3.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.5) +[CIS v1.2.0 3.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.6) +[CIS v1.2.0 3.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.7) +[CIS v1.2.0 3.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.8) +[CIS v1.2.0 3.9](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.9) +[CIS v1.2.0 3.10](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.10) +[CIS v1.2.0 3.11](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.11) +[CIS v1.2.0 3.12](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.12) +[CIS v1.2.0 3.13](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.13) +[CIS v1.2.0 3.14](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-3.14) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + 
"Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "CloudWatch.1", + "CloudWatch.2", + "CloudWatch.3", + "CloudWatch.4", + "CloudWatch.5", + "CloudWatch.6", + "CloudWatch.7", + "CloudWatch.8", + "CloudWatch.9", + "CloudWatch.10", + "CloudWatch.11", + "CloudWatch.12", + "CloudWatch.13", + "CloudWatch.14", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + 
exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in 
self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation 
runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": 
finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "ControlId", + "Selector": "$.Payload.control_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeScript", + "inputs": { + "Handler": "verify", + "InputPayload": { + "ControlId": "{{ ParseInput.ControlId }}", + "StandardLongName": "pci-dss", + "StandardVersion": "3.2.1", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 + +unauthorizedAPICallsFilter = { + "filter_name": "UnauthorizedAPICalls", + "filter_pattern": '{($.errorCode="*UnauthorizedOperation") || ($.errorCode="AccessDenied*")}', + "metric_name": "UnauthorizedAPICalls", + "metric_value": 1, + "alarm_name": "UnauthorizedAPICalls", + "alarm_desc": "Alarm for UnauthorizedAPICalls > 0", + "alarm_threshold": 1, +} + +consoleSignInWithoutMFAFilter = { + "filter_name": "ConsoleSigninWithoutMFA", + "filter_pattern": '{($.eventName="ConsoleLogin") && ($.additionalEventData.MFAUsed !="Yes")}', + "metric_name": "ConsoleSigninWithoutMFA", + "metric_value": 1, + "alarm_name": "ConsoleSigninWithoutMFA", + "alarm_desc": "Alarm for ConsoleSigninWithoutMFA > 0", + "alarm_threshold": 1, +} + +rootAccountUsageFilter = { + "filter_name": "RootAccountUsage", + "filter_pattern": '{$.userIdentity.type="Root" && $.userIdentity.invokedBy NOT EXISTS && $.eventType !="AwsServiceEvent"}', + "metric_name": "RootAccountUsage", + "metric_value": 1, + "alarm_name": "RootAccountUsage", + "alarm_desc": "Alarm for RootAccountUsage > 0", + "alarm_threshold": 1, +} + +iamPolicyChangesFilter = { + "filter_name": "IAMPolicyChanges", + "filter_pattern": "{($.eventName=DeleteGroupPolicy) || ($.eventName=DeleteRolePolicy) || ($.eventName=DeleteUserPolicy) || ($.eventName=PutGroupPolicy) || ($.eventName=PutRolePolicy) || ($.eventName=PutUserPolicy) || ($.eventName=CreatePolicy) || ($.eventName=DeletePolicy) || ($.eventName=CreatePolicyVersion) || ($.eventName=DeletePolicyVersion) || ($.eventName=AttachRolePolicy) || ($.eventName=DetachRolePolicy) || ($.eventName=AttachUserPolicy) || ($.eventName=DetachUserPolicy) || ($.eventName=AttachGroupPolicy) || ($.eventName=DetachGroupPolicy)}", + "metric_name": "IAMPolicyChanges", + "metric_value": 1, + "alarm_name": "IAMPolicyChanges", + "alarm_desc": "Alarm for IAMPolicyChanges > 0", + "alarm_threshold": 1, +} + +cloudtrailChangesFilter = { + "filter_name": "CloudTrailChanges", + 
"filter_pattern": "{($.eventName=CreateTrail) || ($.eventName=UpdateTrail) || ($.eventName=DeleteTrail) || ($.eventName=StartLogging) || ($.eventName=StopLogging)}", + "metric_name": "CloudTrailChanges", + "metric_value": 1, + "alarm_name": "CloudTrailChanges", + "alarm_desc": "Alarm for CloudTrailChanges > 0", + "alarm_threshold": 1, +} + +consoleAuthenticationFailureFilter = { + "filter_name": "ConsoleAuthenticationFailure", + "filter_pattern": '{($.eventName=ConsoleLogin) && ($.errorMessage="Failed authentication")}', + "metric_name": "ConsoleAuthenticationFailure", + "metric_value": 1, + "alarm_name": "ConsoleAuthenticationFailure", + "alarm_desc": "Alarm for ConsoleAuthenticationFailure > 0", + "alarm_threshold": 1, +} + +disableOrDeleteCMKFilter = { + "filter_name": "DisableOrDeleteCMK", + "filter_pattern": "{($.eventSource=kms.amazonaws.com) && (($.eventName=DisableKey) || ($.eventName=ScheduleKeyDeletion))}", + "metric_name": "DisableOrDeleteCMK", + "metric_value": 1, + "alarm_name": "DisableOrDeleteCMK", + "alarm_desc": "Alarm for DisableOrDeleteCMK > 0", + "alarm_threshold": 1, +} + +s3BucketPolicyChangesFilter = { + "filter_name": "S3BucketPolicyChanges", + "filter_pattern": "{($.eventSource=s3.amazonaws.com) && (($.eventName=PutBucketAcl) || ($.eventName=PutBucketPolicy) || ($.eventName=PutBucketCors) || ($.eventName=PutBucketLifecycle) || ($.eventName=PutBucketReplication) || ($.eventName=DeleteBucketPolicy) || ($.eventName=DeleteBucketCors) || ($.eventName=DeleteBucketLifecycle) || ($.eventName=DeleteBucketReplication))}", + "metric_name": "S3BucketPolicyChanges", + "metric_value": 1, + "alarm_name": "S3BucketPolicyChanges", + "alarm_desc": "Alarm for S3BucketPolicyChanges > 0", + "alarm_threshold": 1, +} + +awsConfigChangesFilter = { + "filter_name": "AWSConfigChanges", + "filter_pattern": "{($.eventSource=config.amazonaws.com) && (($.eventName=StopConfigurationRecorder) || ($.eventName=DeleteDeliveryChannel) || ($.eventName=PutDeliveryChannel) || 
($.eventName=PutConfigurationRecorder))}", + "metric_name": "AWSConfigChanges", + "metric_value": 1, + "alarm_name": "AWSConfigChanges", + "alarm_desc": "Alarm for AWSConfigChanges > 0", + "alarm_threshold": 1, +} + +securityGroupChangesFilter = { + "filter_name": "SecurityGroupChanges", + "filter_pattern": "{($.eventName=AuthorizeSecurityGroupIngress) || ($.eventName=AuthorizeSecurityGroupEgress) || ($.eventName=RevokeSecurityGroupIngress) || ($.eventName=RevokeSecurityGroupEgress) || ($.eventName=CreateSecurityGroup) || ($.eventName=DeleteSecurityGroup)}", + "metric_name": "SecurityGroupChanges", + "metric_value": 1, + "alarm_name": "SecurityGroupChanges", + "alarm_desc": "Alarm for SecurityGroupChanges > 0", + "alarm_threshold": 1, +} + +networkACLChangesFilter = { + "filter_name": "NetworkACLChanges", + "filter_pattern": "{($.eventName=CreateNetworkAcl) || ($.eventName=CreateNetworkAclEntry) || ($.eventName=DeleteNetworkAcl) || ($.eventName=DeleteNetworkAclEntry) || ($.eventName=ReplaceNetworkAclEntry) || ($.eventName=ReplaceNetworkAclAssociation)}", + "metric_name": "NetworkACLChanges", + "metric_value": 1, + "alarm_name": "NetworkACLChanges", + "alarm_desc": "Alarm for NetworkACLChanges > 0", + "alarm_threshold": 1, +} + +networkGatewayChangesFilter = { + "filter_name": "NetworkGatewayChanges", + "filter_pattern": "{($.eventName=CreateCustomerGateway) || ($.eventName=DeleteCustomerGateway) || ($.eventName=AttachInternetGateway) || ($.eventName=CreateInternetGateway) || ($.eventName=DeleteInternetGateway) || ($.eventName=DetachInternetGateway)}", + "metric_name": "NetworkGatewayChanges", + "metric_value": 1, + "alarm_name": "NetworkGatewayChanges", + "alarm_desc": "Alarm for NetworkGatewayChanges > 0", + "alarm_threshold": 1, +} + +routeTableChangesFilter = { + "filter_name": "RouteTableChanges", + "filter_pattern": "{($.eventName=CreateRoute) || ($.eventName=CreateRouteTable) || ($.eventName=ReplaceRoute) || ($.eventName=ReplaceRouteTableAssociation) || 
($.eventName=DeleteRouteTable) || ($.eventName=DeleteRoute) || ($.eventName=DisassociateRouteTable)}", + "metric_name": "RouteTableChanges", + "metric_value": 1, + "alarm_name": "RouteTableChanges", + "alarm_desc": "Alarm for RouteTableChanges > 0", + "alarm_threshold": 1, +} + +vpcChangesFilter = { + "filter_name": "VPCChanges", + "filter_pattern": "{($.eventName=CreateVpc) || ($.eventName=DeleteVpc) || ($.eventName=ModifyVpcAttribute) || ($.eventName=AcceptVpcPeeringConnection) || ($.eventName=CreateVpcPeeringConnection) || ($.eventName=DeleteVpcPeeringConnection) || ($.eventName=RejectVpcPeeringConnection) || ($.eventName=AttachClassicLinkVpc) || ($.eventName=DetachClassicLinkVpc) || ($.eventName=DisableVpcClassicLink) || ($.eventName=EnableVpcClassicLink)}", + "metric_name": "VPCChanges", + "metric_value": 1, + "alarm_name": "VPCChanges", + "alarm_desc": "Alarm for VPCChanges > 0", + "alarm_threshold": 1, +} + +Cloudwatch_mappings = { + "cis-aws-foundations-benchmark": { + "1.2.0": { + "3.1": unauthorizedAPICallsFilter, + "3.2": consoleSignInWithoutMFAFilter, + "3.3": rootAccountUsageFilter, + "3.4": iamPolicyChangesFilter, + "3.5": cloudtrailChangesFilter, + "3.6": consoleAuthenticationFailureFilter, + "3.7": disableOrDeleteCMKFilter, + "3.8": s3BucketPolicyChangesFilter, + "3.9": awsConfigChangesFilter, + "3.10": securityGroupChangesFilter, + "3.11": networkACLChangesFilter, + "3.12": networkGatewayChangesFilter, + "3.13": routeTableChangesFilter, + "3.14": vpcChangesFilter, + }, + "1.4.0": { + "4.3": rootAccountUsageFilter, + "4.4": iamPolicyChangesFilter, + "4.5": cloudtrailChangesFilter, + "4.6": consoleAuthenticationFailureFilter, + "4.7": disableOrDeleteCMKFilter, + "4.8": s3BucketPolicyChangesFilter, + "4.9": awsConfigChangesFilter, + "4.10": securityGroupChangesFilter, + "4.11": networkACLChangesFilter, + "4.12": networkGatewayChangesFilter, + "4.13": routeTableChangesFilter, + "4.14": vpcChangesFilter, + }, + }, + "security-control": { + "2.0.0": { + 
"CloudWatch.1": rootAccountUsageFilter, + "CloudWatch.2": unauthorizedAPICallsFilter, + "CloudWatch.3": consoleSignInWithoutMFAFilter, + "CloudWatch.4": iamPolicyChangesFilter, + "CloudWatch.5": cloudtrailChangesFilter, + "CloudWatch.6": consoleAuthenticationFailureFilter, + "CloudWatch.7": disableOrDeleteCMKFilter, + "CloudWatch.8": s3BucketPolicyChangesFilter, + "CloudWatch.9": awsConfigChangesFilter, + "CloudWatch.10": securityGroupChangesFilter, + "CloudWatch.11": networkACLChangesFilter, + "CloudWatch.12": networkGatewayChangesFilter, + "CloudWatch.13": routeTableChangesFilter, + "CloudWatch.14": vpcChangesFilter, + } + }, +} + + +def verify(event, _): + try: + standard_mapping = Cloudwatch_mappings[event["StandardLongName"]][ + event["StandardVersion"] + ] + return standard_mapping.get(event["ControlId"], None) + except KeyError as ex: + exit( + f"ERROR: Could not find associated metric filter. Missing parameter: {str(ex)}" + ) +", + }, + "name": "GetMetricFilterAndAlarmInputValue", + "outputs": [ + { + "Name": "FilterName", + "Selector": "$.Payload.filter_name", + "Type": "String", + }, + { + "Name": "FilterPattern", + "Selector": "$.Payload.filter_pattern", + "Type": "String", + }, + { + "Name": "MetricName", + "Selector": "$.Payload.metric_name", + "Type": "String", + }, + { + "Name": "MetricValue", + "Selector": "$.Payload.metric_value", + "Type": "Integer", + }, + { + "Name": "AlarmName", + "Selector": "$.Payload.alarm_name", + "Type": "String", + }, + { + "Name": "AlarmDesc", + "Selector": "$.Payload.alarm_desc", + "Type": "String", + }, + { + "Name": "AlarmThreshold", + "Selector": "$.Payload.alarm_threshold", + "Type": "Integer", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-CreateLogMetricFilterAndAlarm", + "RuntimeParameters": { + "AlarmDesc": "{{ GetMetricFilterAndAlarmInputValue.AlarmDesc }}", + "AlarmName": "{{ GetMetricFilterAndAlarmInputValue.AlarmName }}", + "AlarmThreshold": "{{ 
GetMetricFilterAndAlarmInputValue.AlarmThreshold }}", + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "FilterName": "{{ GetMetricFilterAndAlarmInputValue.FilterName }}", + "FilterPattern": "{{ GetMetricFilterAndAlarmInputValue.FilterPattern }}", + "KMSKeyArn": "{{ KMSKeyArn }}", + "LogGroupName": "{{ LogGroupName }}", + "MetricName": "{{ GetMetricFilterAndAlarmInputValue.MetricName }}", + "MetricNamespace": "{{ MetricNamespace }}", + "MetricValue": "{{ GetMetricFilterAndAlarmInputValue.MetricValue }}", + "SNSTopicName": "SO0111-SHARR-LocalAlarmNotification", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Added metric filter to the log group and notifications to SNS topic SO0111-ASR-LocalAlarmNotification.", + "UpdatedBy": "ASR-PCI_3.2.1_CloudWatch.1", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the CloudWatch.1 finding", + "type": "StringMap", + }, + "KMSKeyArn": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:(?:(?:alias\\/[A-Za-z0-9/-_])|(?:key\\/(?:[0-9a-fA-F]{8}-(?:[0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12})))$", + "default": "{{ssm:/Solutions/SO0111/CMK_REMEDIATION_ARN}}", + "description": "The ARN of the KMS key 
created by ASR for remediations", + "type": "String", + }, + "LogGroupName": { + "allowedPattern": ".*", + "default": "{{ssm:/Solutions/SO0111/Metrics_LogGroupName}}", + "description": "The name of the Log group to be used to create filters and metric alarms", + "type": "String", + }, + "MetricNamespace": { + "allowedPattern": ".*", + "default": "LogMetrics", + "description": "The name of the metric namespace where the metrics will be logged", + "type": "String", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-CreateLogMetricFilterAndAlarm", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-PCI_3.2.1_CloudWatch.1", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksCodeBuild2A2751671": { + "Condition": "ControlRunbooksEnableCodeBuild2ConditionB01F473D", + "DependsOn": [ + "CreateWait2", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_CodeBuild.2 + +## What does this document do? +This document removes CodeBuild project environment variables containing clear text credentials and replaces them with Amazon EC2 Systems Manager Parameters. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 CodeBuild.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-codebuild-2) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "CodeBuild.2", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):codebuild:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:project\\/([A-Za-z0-9][A-Za-z0-9\\-_]{1,254})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = 
connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: 
{self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is 
invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # 
Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "ProjectName", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-ReplaceCodeBuildClearTextCredentials", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "ProjectName": "{{ ParseInput.ProjectName }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId 
}}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Replaced clear text credentials with SSM parameters.", + "UpdatedBy": "ASR-PCI_3.2.1_CodeBuild.2", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the CodeBuild.2 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-ReplaceCodeBuildClearTextCredentials", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-PCI_3.2.1_CodeBuild.2", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksCodeBuild509682556": { + "Condition": "ControlRunbooksEnableCodeBuild5Condition5FF93A0A", + "DependsOn": [ + "CreateWait2", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_CodeBuild.5 + +## What does this document do? +This document removes CodeBuild project privileged mode to remove a build project's Docker container access to all devices. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 CodeBuild.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-codebuild-5) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "CodeBuild.5", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):codebuild:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:project\\/([A-Za-z0-9][A-Za-z0-9\\-_]{1,254})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = 
connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: 
{self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is 
invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # 
Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "ProjectName", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-RemoveCodeBuildPrivilegedMode", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "ProjectName": "{{ ParseInput.ProjectName }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + 
"ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Removed CodeBuild privileged status.", + "UpdatedBy": "ASR-PCI_3.2.1_CodeBuild.5", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the CodeBuild.5 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-RemoveCodeBuildPrivilegedMode", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-PCI_3.2.1_CodeBuild.5", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksConfig1512B566F": { + "Condition": "ControlRunbooksEnableConfig1Condition8CEB8627", + "DependsOn": [ + "CreateWait2", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_Config.1 +## What does this document do? +Enables AWS Config: +* Turns on recording for all resources. +* Creates an encrypted bucket for Config logging. +* Creates a logging bucket for access logs for the config bucket +* Creates an SNS topic for Config notifications +* Creates a service-linked role + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Documentation Links +* [AWS FSBP Config.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-config-1) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "Config.1", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR 
getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in 
self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation 
runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": 
finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableAWSConfig", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "KMSKeyArn": "{{ KMSKeyArn }}", + "SNSTopicName": "SO0111-SHARR-AWSConfigNotification", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "AWS Config enabled", + "UpdatedBy": "ASR-PCI_3.2.1_Config.1", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The 
input from the Orchestrator Step function for the Config.1 finding", + "type": "StringMap", + }, + "KMSKeyArn": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:(?:(?:alias\\/[A-Za-z0-9/-_])|(?:key\\/(?:[0-9a-fA-F]{8}-(?:[0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12})))$", + "default": "{{ssm:/Solutions/SO0111/CMK_REMEDIATION_ARN}}", + "description": "The ARN of the KMS key created by ASR for remediations", + "type": "String", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableAWSConfig", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-PCI_3.2.1_Config.1", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC213D7C9C1EB": { + "Condition": "ControlRunbooksEnableEC213Condition567EA275", + "DependsOn": [ + "CreateWait4", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-PCI_3.2.1_EC2.5 + +## What does this document do? +Removes public access to remove server administrative ports from an EC2 Security Group + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output - Output of AWS-DisablePublicAccessForSecurityGroup runbook. 
+ +## Documentation Links +* [PCI v3.2.1 EC2.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-pci-controls.html#pcidss-ec2-5) +* [CIS v1.2.0 4.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-4.1) +* [CIS v1.2.0 4.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-4.2) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.13", + "EC2.14", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:security-group\\/(sg-[a-f\\d]{8,17})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def 
get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = 
match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + 
r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 
1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "GroupId", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "AWS-DisablePublicAccessForSecurityGroup", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "GroupId": "{{ ParseInput.GroupId }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ 
ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Disabled public access to administrative ports in the security group {{ ParseInput.GroupId }}.", + "UpdatedBy": "ASR-PCI_3.2.1_EC2.13", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the EC2.13 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-DisablePublicAccessForSecurityGroup", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-PCI_3.2.1_EC2.13", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC214D3BB404": { + "Condition": "ControlRunbooksEnableEC21ConditionD4F1277B", + "DependsOn": [ + "CreateWait2", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.1 +## What does this document do? +This document changes all public EC2 snapshots to private + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Documentation Links +* [AWS FSBP EC2.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-1) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.1", + ], + "parse_id_pattern": "", + "resource_index": 2, + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + 
exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in 
self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation 
runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": 
finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "TestMode", + "Selector": "$.Payload.testmode", + "Type": "Boolean", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-MakeEBSSnapshotsPrivate", + "RuntimeParameters": { + "AccountId": "{{ ParseInput.RemediationAccount }}", + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "TestMode": "{{ ParseInput.TestMode }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "EBS Snapshot modified to private", + "UpdatedBy": "ASR-PCI_3.2.1_EC2.1", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": 
"^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the EC2.1 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-MakeEBSSnapshotsPrivate", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-PCI_3.2.1_EC2.1", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC2153B43E7A8": { + "Condition": "ControlRunbooksEnableEC215Condition52A7DE4B", + "DependsOn": [ + "CreateWait4", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.15 + +## What does this document do? +This document disables auto assignment of public IP addresses on a subnet. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP v1.0.0 EC2.15](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-15)", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.15", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, 
len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) 
+ if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. 
+ self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": 
finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "SubnetARN", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-DisablePublicIPAutoAssign", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "SubnetARN": "{{ ParseInput.SubnetARN }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Disabled public IP auto assignment for subnet.", + "UpdatedBy": "ASR-PCI_3.2.1_EC2.15", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, 
+ ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the EC2.15 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-DisablePublicIPAutoAssign", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-PCI_3.2.1_EC2.15", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC218DB9589DD": { + "Condition": "ControlRunbooksEnableEC218Condition903B1C90", + "DependsOn": [ + "CreateWait4", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.18 + +## What does this document do? +This document revokes inbound security group rules that allow unrestricted access to ports that are not authorized. +Authorized ports are listed in authorizedTcpPorts and authorizedUdpPorts parameters. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 EC2.18](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-18)", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.18", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:security-group/(sg-[0-9a-f]*)$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = 
configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def 
_get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + 
self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": 
finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "SecurityGroupId", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeScript", + "inputs": { + "Handler": "get_input_params", + "InputPayload": { + "DefaultParams": { + "authorizedTcpPorts": [ + "80", + "443", + ], + "authorizedUdpPorts": [], + }, + "SecHubInputParams": "{{ ParseInput.InputParams }}", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import re + + +def parse_non_string_types(param): + if re.match("^\\d+$", str(param)): + param = int(param) + return param + if param == "true" or param == "True": + return True + if param == "false" or param == "False": + return False + if isinstance(param, list): + return param + if len(param.split(",")) > 1: + return param.split(",") + return param + + +def get_input_params(event, _): + security_hub_input_params = event["SecHubInputParams"] + + default_params = event["DefaultParams"] + + input_params = {} + + for param in default_params: + if param in security_hub_input_params: + converted_param = parse_non_string_types(security_hub_input_params[param]) + input_params[param] = converted_param + else: + converted_param = parse_non_string_types(default_params[param]) + input_params[param] = converted_param + + return input_params +", + }, + "name": "GetInputParams", + "outputs": [ + { + "Name": "authorizedTcpPorts", + "Selector": "$.Payload.authorizedTcpPorts", + "Type": "StringList", + }, + { + "Name": "authorizedUdpPorts", + "Selector": "$.Payload.authorizedUdpPorts", + "Type": "StringList", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-RevokeUnauthorizedInboundRules", + "RuntimeParameters": { + "AuthorizedTcpPorts": "{{ GetInputParams.authorizedTcpPorts }}", + "AuthorizedUdpPorts": "{{ GetInputParams.authorizedUdpPorts }}", + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "SecurityGroupId": "{{ ParseInput.SecurityGroupId }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Revoked unrestricted inbound security group rules on unauthorized ports.", + "UpdatedBy": "ASR-PCI_3.2.1_EC2.18", 
+ }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the EC2.18 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-RevokeUnauthorizedInboundRules", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-PCI_3.2.1_EC2.18", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC2197047C726": { + "Condition": "ControlRunbooksEnableEC219Condition2421DE99", + "DependsOn": [ + "CreateWait4", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.19 + +## What does this document do? +This document disables unrestricted access to high risk ports. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 EC2.19](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-19)", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.19", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:security-group/(sg-[0-9a-f]*)$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = 
configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def 
_get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + 
self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": 
finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "SecurityGroupId", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-DisableUnrestrictedAccessToHighRiskPorts", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "SecurityGroupId": "{{ ParseInput.SecurityGroupId }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Revoking access to high risk ports.", + "UpdatedBy": "ASR-PCI_3.2.1_EC2.19", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + 
"allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the EC2.19 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-DisableUnrestrictedAccessToHighRiskPorts", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-PCI_3.2.1_EC2.19", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC223EAFC5818": { + "Condition": "ControlRunbooksEnableEC223Condition795CB580", + "DependsOn": [ + "CreateWait4", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.23 + +## What does this document do? +This document turns off AutoAcceptSharedAttachments on a transit gateway to ensure that only authorized VPC attachment requests are accepted. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 EC2.23](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-23)", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.23", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):ec2:[a-z]{2}-[a-z]+-\\d{1}:\\d{12}:transit-gateway\\/(tgw-[a-z0-9\\-]+)$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = 
configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def 
_get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + 
self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": 
finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "TransitGatewayId", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-DisableTGWAutoAcceptSharedAttachments", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "TransitGatewayId": "{{ ParseInput.TransitGatewayId }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Disabling Transit Gateway from automatically accepting VPC attachment requests.", + "UpdatedBy": "ASR-PCI_3.2.1_EC2.23", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": 
{ + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the EC2.23 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-DisableTGWAutoAcceptSharedAttachments", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-PCI_3.2.1_EC2.23", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC22ED852ADF": { + "Condition": "ControlRunbooksEnableEC22ConditionB9E0D42E", + "DependsOn": [ + "CreateWait3", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.2 + +## What does this document do? +This document deletes ingress and egress rules from default security +group using the AWS SSM Runbook AWSConfigRemediation-RemoveVPCDefaultSecurityGroupRules + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output - Output from AWSConfigRemediation-RemoveVPCDefaultSecurityGroupRules SSM doc + +## Documentation Links +* [AWS FSBP EC2.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-2) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.2", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:security-group\\/(sg-[0-9a-f]*)$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + 
config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = 
( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + 
self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, 
+ "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "GroupId", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-RemoveVPCDefaultSecurityGroupRules", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "GroupId": "{{ ParseInput.GroupId }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + 
"Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Removed rules on default security group", + "UpdatedBy": "ASR-PCI_3.2.1_EC2.2", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the EC2.2 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-RemoveVPCDefaultSecurityGroupRules", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-PCI_3.2.1_EC2.2", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC247C182546": { + "Condition": "ControlRunbooksEnableEC24Condition72408A1B", + "DependsOn": [ + "CreateWait3", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.4 + +## What does this document do? +This document terminates an EC2 instance if it has been stopped for longer than the allowed number of days defined by the AllowedDays parameter. + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 EC2.4](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-4)", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.4", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:instance\\/(i-[0-9a-f]*)$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = 
configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def 
_get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + 
self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": 
finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "InstanceId", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "AWS-TerminateEC2Instance", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "InstanceId": [ + "{{ ParseInput.InstanceId }}", + ], + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Terminated EC2 instance.", + "UpdatedBy": "ASR-PCI_3.2.1_EC2.4", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": 
"^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the EC2.4 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-TerminateEC2Instance", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-PCI_3.2.1_EC2.4", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC267E3087AE": { + "Condition": "ControlRunbooksEnableEC26ConditionF1F880B0", + "DependsOn": [ + "CreateWait3", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.6 + +## What does this document do? +Enables VPC Flow Logs for a VPC + +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output - Remediation results + +## Documentation Links +* [AWS FSBP EC2.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-6) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.6", + ], + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):ec2:.*:\\d{12}:vpc\\/(vpc-[0-9a-f]{8,17})$", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, 
len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) 
+ if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. 
+ self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": 
finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "VPC", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableVPCFlowLogs", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "RemediationRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/SO0111-EnableVPCFlowLogs-remediationRole", + "VPC": "{{ ParseInput.VPC }}", + }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Enabled VPC Flow logging.", + "UpdatedBy": "ASR-PCI_3.2.1_EC2.6", + }, + "Service": "securityhub", + "Workflow": { + 
"Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the Orchestrator Step function for the EC2.6 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableVPCFlowLogs", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-PCI_3.2.1_EC2.6", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC277719A4CD": { + "Condition": "ControlRunbooksEnableEC27ConditionC77CF056", + "DependsOn": [ + "CreateWait3", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.7 +## What does this document do? +This document enables \`EBS Encryption by default\` for an AWS account in the current region by calling another SSM document +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP EC2.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-7) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.7", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception 
as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + 
"RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) + if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data 
to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) + + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) + + def __str__(self): + return json.dumps(self.__dict__) + + +""" +MAIN +""" + + +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) + + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") + + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": 
finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", + }, + "name": "ParseInput", + "outputs": [ + { + "Name": "FindingId", + "Selector": "$.Payload.finding_id", + "Type": "String", + }, + { + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", + "Type": "String", + }, + { + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", + }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + ], + }, + { + "action": "aws:executeAutomation", + "inputs": { + "DocumentName": "ASR-EnableEbsEncryptionByDefault", + "RuntimeParameters": { + "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + }, + }, + "name": "Remediation", + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "BatchUpdateFindings", + "FindingIdentifiers": [ + { + "Id": "{{ ParseInput.FindingId }}", + "ProductArn": "{{ ParseInput.ProductArn }}", + }, + ], + "Note": { + "Text": "Enabled EBS encryption by default", + "UpdatedBy": "ASR-PCI_3.2.1_EC2.7", + }, + "Service": "securityhub", + "Workflow": { + "Status": "RESOLVED", + }, + }, + "isEnd": true, + "name": "UpdateFinding", + }, + ], + "outputs": [ + "Remediation.Output", + "ParseInput.AffectedObject", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role\\/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "Finding": { + "description": "The input from the 
Orchestrator Step function for the EC2.7 finding", + "type": "StringMap", + }, + "RemediationRoleName": { + "allowedPattern": "^[\\w+=,.@-]+$", + "default": "SO0111-EnableEbsEncryptionByDefault", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-PCI_3.2.1_EC2.7", + "Tags": [ + { + "Key": "CdkGenerated", + "Value": "true", + }, + ], + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ControlRunbooksEC287C39A9F1": { + "Condition": "ControlRunbooksEnableEC28Condition4C4640B8", + "DependsOn": [ + "CreateWait3", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.8 +## What does this document do? +This document enables IMDSv2 on an Instance for an AWS account in the current region by calling another SSM document. +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AWS FSBP EC2.8](https://docs.aws.amazon.com/securityhub/latest/userguide/ec2-controls.html#ec2-8) +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "parse_event", + "InputPayload": { + "Finding": "{{ Finding }}", + "expected_control_id": [ + "EC2.8", + ], + "parse_id_pattern": "", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import json +import re +from typing import Any + +import boto3 +from botocore.config import Config + + +def connect_to_config(boto_config): + return boto3.client("config", config=boto_config) + + +def connect_to_ssm(boto_config): + return boto3.client("ssm", config=boto_config) + + +def get_solution_id(): + return "SO0111" + + +def get_solution_version(): + ssm = connect_to_ssm( + Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", + ) + ) + solution_version = "unknown" + try: + ssm_parm_value = ssm.get_parameter( + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") + solution_version = ssm_parm_value + except Exception as e: + print(e) + print("ERROR getting solution version") + return solution_version + + +def get_shortname(long_name): + short_name = { + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", + } + return short_name.get(long_name, None) + + +def get_config_rule(rule_name): + boto_config = Config( + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", + ) + config_rule = None + try: + configsvc = connect_to_config(boto_config) + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] + except Exception as e: + print(e) + exit(f"ERROR getting config rule {rule_name}") + return config_rule + + +class FindingEvent: + """ + Finding object returns the parse fields from an input finding json object + """ + + def _get_resource_id(self, parse_id_pattern, resource_index): + identifier_raw = self.finding_json["Resources"][0]["Id"] + self.resource_id = identifier_raw + self.resource_id_matches = [] + + if parse_id_pattern: + identifier_match = re.match(parse_id_pattern, identifier_raw) + + if identifier_match: + for group in range(1, 
len(identifier_match.groups()) + 1): + self.resource_id_matches.append(identifier_match.group(group)) + self.resource_id = identifier_match.group(resource_index) + else: + exit(f"ERROR: Invalid resource Id {identifier_raw}") + + def _get_sc_check(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname("security-control") + self.control_id = match_finding_id.group(1) + + return match_finding_id + + def _get_standard_info(self): + match_finding_id = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], + ) + if match_finding_id: + self.standard_id = get_shortname(match_finding_id.group(1)) + self.standard_version = match_finding_id.group(2) + self.control_id = match_finding_id.group(3) + else: + match_sc_finding_id = self._get_sc_check() + if not match_sc_finding_id: + self.valid_finding = False + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) + + def _get_aws_config_rule(self): + # config_rule_id refers to the AWS Config Rule that produced the finding + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] + self.aws_config_rule = get_config_rule(self.aws_config_rule_id) + + def _get_region_from_resource_id(self): + check_for_region = re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], + ) 
+ if check_for_region: + self.resource_region = check_for_region.group(1) + + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): + self.valid_finding = True + self.resource_region = None + self.control_id = None + self.aws_config_rule_id = None + self.aws_config_rule = {} + self.input_params = {} + + """Populate fields""" + # v1.5 + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + + # V1.4 + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) + # Test mode is used with fabricated finding data to tell the + # remediation runbook to run in test more (where supported) + # Currently not widely-used and perhaps should be deprecated. 
+ self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] + self._get_region_from_resource_id() + self._get_aws_config_rule() + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } + + # Validate control_id + if not self.control_id: + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value + if self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" + + if not self.resource_id and self.valid_finding: + self.valid_finding = False + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) -securityGroupChangesFilter = { - "filter_name": "SecurityGroupChanges", - "filter_pattern": '{($.eventName=AuthorizeSecurityGroupIngress) || ($.eventName=AuthorizeSecurityGroupEgress) || ($.eventName=RevokeSecurityGroupIngress) || ($.eventName=RevokeSecurityGroupEgress) || ($.eventName=CreateSecurityGroup) || ($.eventName=DeleteSecurityGroup)}', - "metric_name": "SecurityGroupChanges", - "metric_value": 1, - "alarm_name": "SecurityGroupChanges", - "alarm_desc": "Alarm for SecurityGroupChanges > 0", - "alarm_threshold": 1 -} + if not self.valid_finding: + # Error message and return error data + msg = f"ERROR: {self.invalid_finding_reason}" + exit(msg) -networkACLChangesFilter = { - "filter_name": "NetworkACLChanges", - "filter_pattern": '{($.eventName=CreateNetworkAcl) || ($.eventName=CreateNetworkAclEntry) || ($.eventName=DeleteNetworkAcl) || ($.eventName=DeleteNetworkAclEntry) || 
($.eventName=ReplaceNetworkAclEntry) || ($.eventName=ReplaceNetworkAclAssociation)}', - "metric_name": "NetworkACLChanges", - "metric_value": 1, - "alarm_name": "NetworkACLChanges", - "alarm_desc": "Alarm for NetworkACLChanges > 0", - "alarm_threshold": 1 -} + def __str__(self): + return json.dumps(self.__dict__) -networkGatewayChangesFilter = { - "filter_name": "NetworkGatewayChanges", - "filter_pattern": '{($.eventName=CreateCustomerGateway) || ($.eventName=DeleteCustomerGateway) || ($.eventName=AttachInternetGateway) || ($.eventName=CreateInternetGateway) || ($.eventName=DeleteInternetGateway) || ($.eventName=DetachInternetGateway)}', - "metric_name": "NetworkGatewayChanges", - "metric_value": 1, - "alarm_name": "NetworkGatewayChanges", - "alarm_desc": "Alarm for NetworkGatewayChanges > 0", - "alarm_threshold": 1 -} -routeTableChangesFilter = { - "filter_name": "RouteTableChanges", - "filter_pattern": '{($.eventName=CreateRoute) || ($.eventName=CreateRouteTable) || ($.eventName=ReplaceRoute) || ($.eventName=ReplaceRouteTableAssociation) || ($.eventName=DeleteRouteTable) || ($.eventName=DeleteRoute) || ($.eventName=DisassociateRouteTable)}', - "metric_name": "RouteTableChanges", - "metric_value": 1, - "alarm_name": "RouteTableChanges", - "alarm_desc": "Alarm for RouteTableChanges > 0", - "alarm_threshold": 1 -} +""" +MAIN +""" -vpcChangesFilter = { - "filter_name": "VPCChanges", - "filter_pattern": '{($.eventName=CreateVpc) || ($.eventName=DeleteVpc) || ($.eventName=ModifyVpcAttribute) || ($.eventName=AcceptVpcPeeringConnection) || ($.eventName=CreateVpcPeeringConnection) || ($.eventName=DeleteVpcPeeringConnection) || ($.eventName=RejectVpcPeeringConnection) || ($.eventName=AttachClassicLinkVpc) || ($.eventName=DetachClassicLinkVpc) || ($.eventName=DisableVpcClassicLink) || ($.eventName=EnableVpcClassicLink)}', - "metric_name": "VPCChanges", - "metric_value": 1, - "alarm_name": "VPCChanges", - "alarm_desc": "Alarm for VPCChanges > 0", - "alarm_threshold": 1 -} 
-Cloudwatch_mappings = { - 'cis-aws-foundations-benchmark': { - '1.2.0': { - '3.1': unauthorizedAPICallsFilter, - '3.2': consoleSignInWithoutMFAFilter, - '3.3': rootAccountUsageFilter, - '3.4': iamPolicyChangesFilter, - '3.5': cloudtrailChangesFilter, - '3.6': consoleAuthenticationFailureFilter, - '3.7': disableOrDeleteCMKFilter, - '3.8': s3BucketPolicyChangesFilter, - '3.9': awsConfigChangesFilter, - '3.10': securityGroupChangesFilter, - '3.11': networkACLChangesFilter, - '3.12': networkGatewayChangesFilter, - '3.13': routeTableChangesFilter, - '3.14': vpcChangesFilter - }, - '1.4.0': { - '4.3': rootAccountUsageFilter, - '4.4': iamPolicyChangesFilter, - '4.5': cloudtrailChangesFilter, - '4.6': consoleAuthenticationFailureFilter, - '4.7': disableOrDeleteCMKFilter, - '4.8': s3BucketPolicyChangesFilter, - '4.9': awsConfigChangesFilter, - '4.10': securityGroupChangesFilter, - '4.11': networkACLChangesFilter, - '4.12': networkGatewayChangesFilter, - '4.13': routeTableChangesFilter, - '4.14': vpcChangesFilter - } - }, - 'security-control': { - '2.0.0': { - "CloudWatch.1": rootAccountUsageFilter, - "CloudWatch.2": unauthorizedAPICallsFilter, - "CloudWatch.3": consoleSignInWithoutMFAFilter, - "CloudWatch.4": iamPolicyChangesFilter, - "CloudWatch.5": cloudtrailChangesFilter, - "CloudWatch.6": consoleAuthenticationFailureFilter, - "CloudWatch.7": disableOrDeleteCMKFilter, - "CloudWatch.8": s3BucketPolicyChangesFilter, - "CloudWatch.9": awsConfigChangesFilter, - "CloudWatch.10": securityGroupChangesFilter, - "CloudWatch.11": networkACLChangesFilter, - "CloudWatch.12": networkGatewayChangesFilter, - "CloudWatch.13": routeTableChangesFilter, - "CloudWatch.14": vpcChangesFilter - } - } -} +def parse_event(event, _): + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) -def verify(event, _): - try: - standard_mapping = 
Cloudwatch_mappings.get(event['StandardLongName']).get(event['StandardVersion']) - return standard_mapping.get(event['ControlId'], None) - except KeyError as ex: - exit(f'ERROR: Could not find associated metric filter. Missing parameter: {str(ex)}') + if not finding_event.valid_finding: + exit("ERROR: Finding is not valid") - ", + return { + "account_id": finding_event.account_id, + "resource_id": finding_event.resource_id, + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "control_id": finding_event.control_id, + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "object": finding_event.affected_object, + "matches": finding_event.resource_id_matches, + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "resource": finding_event.resource, + "resource_region": finding_event.resource_region, + "finding": finding_event.finding_json, + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, + } +", }, - "name": "GetMetricFilterAndAlarmInputValue", + "name": "ParseInput", "outputs": [ { - "Name": "FilterName", - "Selector": "$.Payload.filter_name", - "Type": "String", - }, - { - "Name": "FilterPattern", - "Selector": "$.Payload.filter_pattern", + "Name": "FindingId", + "Selector": "$.Payload.finding_id", "Type": "String", }, { - "Name": "MetricName", - "Selector": "$.Payload.metric_name", + "Name": "ProductArn", + "Selector": "$.Payload.product_arn", "Type": "String", }, { - "Name": "MetricValue", - "Selector": "$.Payload.metric_value", - "Type": "Integer", + "Name": "AffectedObject", + "Selector": "$.Payload.object", + "Type": "StringMap", }, { - "Name": "AlarmName", - "Selector": "$.Payload.alarm_name", - "Type": "String", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", }, { - "Name": "AlarmDesc", - "Selector": "$.Payload.alarm_desc", + "Name": "InstanceARN", + "Selector": "$.Payload.resource_id", 
"Type": "String", }, - { - "Name": "AlarmThreshold", - "Selector": "$.Payload.alarm_threshold", - "Type": "Integer", - }, ], }, { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-CreateLogMetricFilterAndAlarm", + "DocumentName": "ASR-EnableIMDSV2OnInstance", "RuntimeParameters": { - "AlarmDesc": "{{ GetMetricFilterAndAlarmInputValue.AlarmDesc }}", - "AlarmName": "{{ GetMetricFilterAndAlarmInputValue.AlarmName }}", - "AlarmThreshold": "{{ GetMetricFilterAndAlarmInputValue.AlarmThreshold }}", "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "FilterName": "{{ GetMetricFilterAndAlarmInputValue.FilterName }}", - "FilterPattern": "{{ GetMetricFilterAndAlarmInputValue.FilterPattern }}", - "KMSKeyArn": "{{ KMSKeyArn }}", - "LogGroupName": "{{ LogGroupName }}", - "MetricName": "{{ GetMetricFilterAndAlarmInputValue.MetricName }}", - "MetricNamespace": "{{ MetricNamespace }}", - "MetricValue": "{{ GetMetricFilterAndAlarmInputValue.MetricValue }}", - "SNSTopicName": "SO0111-SHARR-LocalAlarmNotification", + "InstanceARN": "{{ ParseInput.InstanceARN }}", }, }, "name": "Remediation", @@ -4540,8 +11659,8 @@ def verify(event, _): }, ], "Note": { - "Text": "Added metric filter to the log group and notifications to SNS topic SO0111-ASR-LocalAlarmNotification.", - "UpdatedBy": "ASR-PCI_3.2.1_CloudWatch.1", + "Text": "Enabled IMDSv2 on Instance", + "UpdatedBy": "ASR-PCI_3.2.1_EC2.8", }, "Service": "securityhub", "Workflow": { @@ -4563,30 +11682,12 @@ def verify(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the CloudWatch.1 finding", + "description": "The input from the Orchestrator Step function for the EC2.8 finding", "type": "StringMap", }, - "KMSKeyArn": { - "allowedPattern": 
"^arn:(?:aws|aws-us-gov|aws-cn):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:(?:(?:alias\\/[A-Za-z0-9/-_])|(?:key\\/(?:[0-9a-fA-F]{8}-(?:[0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12})))$", - "default": "{{ssm:/Solutions/SO0111/CMK_REMEDIATION_ARN}}", - "description": "The ARN of the KMS key created by ASR for remediations", - "type": "String", - }, - "LogGroupName": { - "allowedPattern": ".*", - "default": "{{ssm:/Solutions/SO0111/Metrics_LogGroupName}}", - "description": "The name of the Log group to be used to create filters and metric alarms", - "type": "String", - }, - "MetricNamespace": { - "allowedPattern": ".*", - "default": "LogMetrics", - "description": "The name of the metric namespace where the metrics will be logged", - "type": "String", - }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-CreateLogMetricFilterAndAlarm", + "default": "SO0111-EnableIMDSV2OnInstance", "type": "String", }, }, @@ -4594,7 +11695,7 @@ def verify(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_CloudWatch.1", + "Name": "ASR-PCI_3.2.1_EC2.8", "Tags": [ { "Key": "CdkGenerated", @@ -4605,29 +11706,28 @@ def verify(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksCodeBuild2A2751671": { - "Condition": "ControlRunbooksEnableCodeBuild2ConditionB01F473D", + "ControlRunbooksECR16DEF82C5": { + "Condition": "ControlRunbooksEnableECR1Condition70BCAF70", "DependsOn": [ - "CreateWait1", + "CreateWait5", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_CodeBuild.2 - + "description": "### Document Name - ASR-AFSBP_1.0.0_ECR.1 + ## What does this document do? -This document removes CodeBuild project environment variables containing clear text credentials and replaces them with Amazon EC2 Systems Manager Parameters. - +This document enables image scanning configuration on a private ECR repository. 
+ ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. - + ## Output Parameters * Remediation.Output - + ## Documentation Links -* [AFSBP v1.0.0 CodeBuild.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-codebuild-2) -", +* [AFSBP v1.0.0 ECR.1](https://docs.aws.amazon.com/securityhub/latest/userguide/ecr-controls.html#ecr-1)", "mainSteps": [ { "action": "aws:executeScript", @@ -4636,113 +11736,116 @@ This document removes CodeBuild project environment variables containing clear t "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "CodeBuild.2", + "ECR.1", ], - "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):codebuild:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:project\\/([A-Za-z0-9][A-Za-z0-9\\-_]{1,254})$", + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):ecr:[a-z]{2}-[a-z]+-\\d{1}:\\d{12}:repository\\/([a-z0-9._\\/\\-]+)$", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -4752,101 +11855,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -4868,18 +12012,13 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "ProjectName", - "Selector": "$.Payload.resource_id", - "Type": "String", - }, - { - "Name": "RemediationAccount", - "Selector": "$.Payload.account_id", - "Type": "String", + "Name": "InputParams", + 
"Selector": "$.Payload.input_params", + "Type": "StringMap", }, { - "Name": "RemediationRegion", - "Selector": "$.Payload.resource_region", + "Name": "RepositoryName", + "Selector": "$.Payload.resource_id", "Type": "String", }, ], @@ -4887,22 +12026,11 @@ def parse_event(event, _): { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-ReplaceCodeBuildClearTextCredentials", + "DocumentName": "ASR-EnablePrivateRepositoryScanning", "RuntimeParameters": { "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "ProjectName": "{{ ParseInput.ProjectName }}", + "RepositoryName": "{{ ParseInput.RepositoryName }}", }, - "TargetLocations": [ - { - "Accounts": [ - "{{ ParseInput.RemediationAccount }}", - ], - "ExecutionRoleName": "{{ RemediationRoleName }}", - "Regions": [ - "{{ ParseInput.RemediationRegion }}", - ], - }, - ], }, "name": "Remediation", }, @@ -4917,8 +12045,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Replaced clear text credentials with SSM parameters.", - "UpdatedBy": "ASR-PCI_3.2.1_CodeBuild.2", + "Text": "Enabling image scanning for private ECR repository.", + "UpdatedBy": "ASR-PCI_3.2.1_ECR.1", }, "Service": "securityhub", "Workflow": { @@ -4940,12 +12068,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the CodeBuild.2 finding", + "description": "The input from the Orchestrator Step function for the ECR.1 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-ReplaceCodeBuildClearTextCredentials", + "default": "SO0111-EnablePrivateRepositoryScanning", "type": "String", }, }, @@ -4953,7 +12081,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_CodeBuild.2", + "Name": "ASR-PCI_3.2.1_ECR.1", "Tags": [ { "Key": "CdkGenerated", @@ -4964,30 +12092,28 @@ def 
parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksConfig1512B566F": { - "Condition": "ControlRunbooksEnableConfig1Condition8CEB8627", + "ControlRunbooksGuardDuty15E0D2BEA": { + "Condition": "ControlRunbooksEnableGuardDuty1Condition97849740", "DependsOn": [ - "CreateWait2", + "CreateWait5", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_Config.1 + "description": "### Document Name - ASR-AFSBP_1.0.0_GuardDuty.1 + ## What does this document do? -Enables AWS Config: -* Turns on recording for all resources. -* Creates an encrypted bucket for Config logging. -* Creates a logging bucket for access logs for the config bucket -* Creates an SNS topic for Config notifications -* Creates a service-linked role - +This document enables GuardDuty. + ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. - + +## Output Parameters +* Remediation.Output + ## Documentation Links -* [AFSBP Config.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-config-1) -", +* [AWS FSBP v1.0.0 GuardDuty.1](https://docs.aws.amazon.com/securityhub/latest/userguide/guardduty-controls.html#guardduty-1)", "mainSteps": [ { "action": "aws:executeScript", @@ -4996,113 +12122,116 @@ Enables AWS Config: "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "Config.1", + "GuardDuty.1", ], "parse_id_pattern": "", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -5112,101 +12241,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -5227,16 +12397,19 @@ def parse_event(event, _): "Selector": "$.Payload.object", "Type": "StringMap", }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, ], }, { "action": "aws:executeAutomation", "inputs": { - "DocumentName": 
"ASR-EnableAWSConfig", + "DocumentName": "ASR-EnableGuardDuty", "RuntimeParameters": { "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "KMSKeyArn": "{{ KMSKeyArn }}", - "SNSTopicName": "SO0111-SHARR-AWSConfigNotification", }, }, "name": "Remediation", @@ -5252,8 +12425,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "AWS Config enabled", - "UpdatedBy": "ASR-PCI_3.2.1_Config.1", + "Text": "Amazon GuardDuty enabled.", + "UpdatedBy": "ASR-PCI_3.2.1_GuardDuty.1", }, "Service": "securityhub", "Workflow": { @@ -5275,18 +12448,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the Config.1 finding", + "description": "The input from the Orchestrator Step function for the GuardDuty.1 finding", "type": "StringMap", }, - "KMSKeyArn": { - "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:(?:(?:alias\\/[A-Za-z0-9/-_])|(?:key\\/(?:[0-9a-fA-F]{8}-(?:[0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12})))$", - "default": "{{ssm:/Solutions/SO0111/CMK_REMEDIATION_ARN}}", - "description": "The ARN of the KMS key created by ASR for remediations", - "type": "String", - }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-EnableAWSConfig", + "default": "SO0111-EnableGuardDuty", "type": "String", }, }, @@ -5294,7 +12461,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_Config.1", + "Name": "ASR-PCI_3.2.1_GuardDuty.1", "Tags": [ { "Key": "CdkGenerated", @@ -5305,30 +12472,28 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksEC213D7C9C1EB": { - "Condition": "ControlRunbooksEnableEC213Condition567EA275", + "ControlRunbooksIAM18ACE62321": { + "Condition": "ControlRunbooksEnableIAM18ConditionC6288150", "DependsOn": [ - "CreateWait3", + "CreateWait6", ], "Properties": { 
"Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-PCI_3.2.1_EC2.5 + "description": "### Document Name - ASR-CIS_1.2.0_1.20 ## What does this document do? -Removes public access to remove server administrative ports from an EC2 Security Group +Creates a support role to allow AWS Support access. ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. ## Output Parameters -* Remediation.Output - Output of AWS-DisablePublicAccessForSecurityGroup runbook. +* Remediation.Output - Output of CreateRole API. ## Documentation Links -* [PCI v3.2.1 EC2.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-pci-controls.html#pcidss-ec2-5) -* [CIS v1.2.0 4.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-4.1) -* [CIS v1.2.0 4.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-4.2) +* [CIS v1.2.0 1.20](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-1.20) ", "mainSteps": [ { @@ -5338,114 +12503,116 @@ Removes public access to remove server administrative ports from an EC2 Security "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "EC2.13", - "EC2.14", + "IAM.18", ], - "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:security-group\\/(sg-[a-f\\d]{8,17})$", + "parse_id_pattern": "", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -5455,101 +12622,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -5571,29 +12779,18 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "GroupId", - "Selector": "$.Payload.resource_id", - "Type": "String", - }, - { - "Name": "RemediationAccount", - "Selector": "$.Payload.account_id", - "Type": "String", - }, - { - "Name": 
"RemediationRegion", - "Selector": "$.Payload.resource_region", - "Type": "String", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", }, ], }, { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "AWS-DisablePublicAccessForSecurityGroup", + "DocumentName": "ASR-CreateIAMSupportRole", "RuntimeParameters": { "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "GroupId": "{{ ParseInput.GroupId }}", }, }, "name": "Remediation", @@ -5609,8 +12806,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Disabled public access to administrative ports in the security group {{ ParseInput.GroupId }}.", - "UpdatedBy": "ASR-PCI_3.2.1_EC2.13", + "Text": "Create an IAM role to allow authorized users to manage incidents with AWS Support using the ASR-CreateIAMSupportRole runbook.", + "UpdatedBy": "ASR-PCI_3.2.1_IAM.18", }, "Service": "securityhub", "Workflow": { @@ -5632,12 +12829,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the EC2.13 finding", + "description": "The input from the Orchestrator Step function for the IAM.18 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-DisablePublicAccessForSecurityGroup", + "default": "SO0111-CreateIAMSupportRole", "type": "String", }, }, @@ -5645,7 +12842,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_EC2.13", + "Name": "ASR-PCI_3.2.1_IAM.18", "Tags": [ { "Key": "CdkGenerated", @@ -5656,24 +12853,30 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksEC214D3BB404": { - "Condition": "ControlRunbooksEnableEC21ConditionD4F1277B", + "ControlRunbooksIAM2280FCB95D": { + "Condition": "ControlRunbooksEnableIAM22Condition387158E7", "DependsOn": [ - "CreateWait2", + 
"CreateWait6", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.1 + "description": "### Document Name - ASR-AFSBP_1.0.0_IAM.8 + ## What does this document do? -This document changes all public EC2 snapshots to private +This document ensures that credentials unused for 90 days or greater are disabled. ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +## Output Parameters +* Remediation.Output - Output of remediation runbook + +SEE AWSConfigRemediation-RevokeUnusedIAMUserCredentials + ## Documentation Links -* [AFSBP EC2.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-1) +* [AWS FSBP IAM.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-8) ", "mainSteps": [ { @@ -5683,114 +12886,116 @@ This document changes all public EC2 snapshots to private "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "EC2.1", + "IAM.22", ], "parse_id_pattern": "", - "resource_index": 2, }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -5800,101 +13005,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -5916,25 +13162,25 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "RemediationAccount", - "Selector": "$.Payload.account_id", - "Type": "String", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", }, { - "Name": "TestMode", - 
"Selector": "$.Payload.testmode", - "Type": "Boolean", + "Name": "IAMResourceId", + "Selector": "$.Payload.details.AwsIamUser.UserId", + "Type": "String", }, ], }, { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-MakeEBSSnapshotsPrivate", + "DocumentName": "ASR-RevokeUnusedIAMUserCredentials", "RuntimeParameters": { - "AccountId": "{{ ParseInput.RemediationAccount }}", "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "TestMode": "{{ ParseInput.TestMode }}", + "IAMResourceId": "{{ ParseInput.IAMResourceId }}", + "MaxCredentialUsageAge": "45", }, }, "name": "Remediation", @@ -5950,8 +13196,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "EBS Snapshot modified to private", - "UpdatedBy": "ASR-PCI_3.2.1_EC2.1", + "Text": "Deactivated unused keys and expired logins using the ASR-RevokeUnusedIAMUserCredentials runbook.", + "UpdatedBy": "ASR-PCI_3.2.1_IAM.22", }, "Service": "securityhub", "Workflow": { @@ -5973,12 +13219,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the EC2.1 finding", + "description": "The input from the Orchestrator Step function for the IAM.22 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-MakeEBSSnapshotsPrivate", + "default": "SO0111-RevokeUnusedIAMUserCredentials", "type": "String", }, }, @@ -5986,7 +13232,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_EC2.1", + "Name": "ASR-PCI_3.2.1_IAM.22", "Tags": [ { "Key": "CdkGenerated", @@ -5997,28 +13243,29 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksEC2153B43E7A8": { - "Condition": "ControlRunbooksEnableEC215Condition52A7DE4B", + "ControlRunbooksIAM3DC25477E": { + "Condition": "ControlRunbooksEnableIAM3Condition3AA0E892", "DependsOn": [ - 
"CreateWait3", + "CreateWait5", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.15 - + "description": "### Document Name - ASR-AFSBP_1.0.0_IAM.3 + ## What does this document do? -This document disables auto assignment of public IP addresses on a subnet. - +This document disables active keys that have not been rotated for more than 90 days. Note that this remediation is **DISRUPTIVE**. + ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. - + ## Output Parameters * Remediation.Output - + ## Documentation Links -* [AFSBP v1.0.0 EC2.15](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-15)", +* [AWS FSBP v1.0.0 IAM.3](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-3) +", "mainSteps": [ { "action": "aws:executeScript", @@ -6027,113 +13274,116 @@ This document disables auto assignment of public IP addresses on a subnet. "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "EC2.15", + "IAM.3", ], - "parse_id_pattern": "", + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):iam::\\d{12}:user(?:(?:\\u002F)|(?:\\u002F[\\u0021-\\u007F]{1,510}\\u002F))([\\w+=,.@-]{1,64})$", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -6143,101 +13393,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -6259,18 +13550,18 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "SubnetARN", - "Selector": "$.Payload.resource_id", - "Type": "String", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", }, { - "Name": "RemediationAccount", - 
"Selector": "$.Payload.account_id", + "Name": "IAMUser", + "Selector": "$.Payload.resource_id", "Type": "String", }, { - "Name": "RemediationRegion", - "Selector": "$.Payload.resource_region", + "Name": "IAMResourceId", + "Selector": "$.Payload.details.AwsIamUser.UserId", "Type": "String", }, ], @@ -6278,22 +13569,12 @@ def parse_event(event, _): { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-DisablePublicIPAutoAssign", + "DocumentName": "ASR-RevokeUnrotatedKeys", "RuntimeParameters": { "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "SubnetARN": "{{ ParseInput.SubnetARN }}", + "IAMResourceId": "{{ ParseInput.IAMResourceId }}", + "MaxCredentialUsageAge": "{{ MaxCredentialUsageAge }}", }, - "TargetLocations": [ - { - "Accounts": [ - "{{ ParseInput.RemediationAccount }}", - ], - "ExecutionRoleName": "{{ RemediationRoleName }}", - "Regions": [ - "{{ ParseInput.RemediationRegion }}", - ], - }, - ], }, "name": "Remediation", }, @@ -6308,8 +13589,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Disabled public IP auto assignment for subnet.", - "UpdatedBy": "ASR-PCI_3.2.1_EC2.15", + "Text": "Deactivated unrotated keys for {{ ParseInput.IAMUser }}.", + "UpdatedBy": "ASR-PCI_3.2.1_IAM.3", }, "Service": "securityhub", "Workflow": { @@ -6331,12 +13612,18 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the EC2.15 finding", + "description": "The input from the Orchestrator Step function for the IAM.3 finding", "type": "StringMap", }, + "MaxCredentialUsageAge": { + "allowedPattern": "^(?:[1-9]\\d{0,3}|10000)$", + "default": "90", + "description": "(Required) Maximum number of days a key can be unrotated. 
The default value is 90 days.", + "type": "String", + }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-DisablePublicIPAutoAssign", + "default": "SO0111-RevokeUnrotatedKeys", "type": "String", }, }, @@ -6344,7 +13631,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_EC2.15", + "Name": "ASR-PCI_3.2.1_IAM.3", "Tags": [ { "Key": "CdkGenerated", @@ -6355,29 +13642,30 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksEC22ED852ADF": { - "Condition": "ControlRunbooksEnableEC22ConditionB9E0D42E", + "ControlRunbooksIAM70A808F7C": { + "Condition": "ControlRunbooksEnableIAM7ConditionDF8E776B", "DependsOn": [ - "CreateWait2", + "CreateWait5", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.2 + "description": "### Document Name - ASR-AFSBP_1.0.0_IAM.7 ## What does this document do? -This document deletes ingress and egress rules from default security -group using the AWS SSM Runbook AWSConfigRemediation-RemoveVPCDefaultSecurityGroupRules +This document establishes a default password policy. + +## Security Standards and Controls +* AWS FSBP IAM.7 ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
- ## Output Parameters -* Remediation.Output - Output from AWSConfigRemediation-RemoveVPCDefaultSecurityGroupRules SSM doc +* Remediation.Output ## Documentation Links -* [AFSBP EC2.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-2) +* [AWS FSBP IAM.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-7) ", "mainSteps": [ { @@ -6387,113 +13675,123 @@ group using the AWS SSM Runbook AWSConfigRemediation-RemoveVPCDefaultSecurityGro "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "EC2.2", + "IAM.7", + "IAM.11", + "IAM.12", + "IAM.13", + "IAM.14", + "IAM.15", + "IAM.16", + "IAM.17", ], - "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:security-group\\/(sg-[0-9a-f]*)$", + "parse_id_pattern": "", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + 
Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, 
len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -6503,101 +13801,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in 
self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + 
self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. 
- self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 
1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -6619,41 +13958,134 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "GroupId", - "Selector": "$.Payload.resource_id", - "Type": "String", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + ], + }, + { + "action": "aws:executeScript", + "inputs": { + "Handler": "get_input_params", + "InputPayload": { + "DefaultParams": { + "AllowUsersToChangePassword": "True", + "HardExpiry": "True", + "MaxPasswordAge": "90", + "MinimumPasswordLength": "14", + "PasswordReusePrevention": "24", + "RequireLowercaseCharacters": "True", + "RequireNumbers": "True", + "RequireSymbols": "True", + "RequireUppercaseCharacters": "True", + }, + "SecHubInputParams": "{{ ParseInput.InputParams }}", + }, + 
"Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import re + + +def parse_non_string_types(param): + if re.match("^\\d+$", str(param)): + param = int(param) + return param + if param == "true" or param == "True": + return True + if param == "false" or param == "False": + return False + if isinstance(param, list): + return param + if len(param.split(",")) > 1: + return param.split(",") + return param + + +def get_input_params(event, _): + security_hub_input_params = event["SecHubInputParams"] + + default_params = event["DefaultParams"] + + input_params = {} + + for param in default_params: + if param in security_hub_input_params: + converted_param = parse_non_string_types(security_hub_input_params[param]) + input_params[param] = converted_param + else: + converted_param = parse_non_string_types(default_params[param]) + input_params[param] = converted_param + + return input_params +", + }, + "name": "GetInputParams", + "outputs": [ + { + "Name": "AllowUsersToChangePassword", + "Selector": "$.Payload.AllowUsersToChangePassword", + "Type": "Boolean", }, { - "Name": "RemediationAccount", - "Selector": "$.Payload.account_id", - "Type": "String", + "Name": "HardExpiry", + "Selector": "$.Payload.HardExpiry", + "Type": "Boolean", }, { - "Name": "RemediationRegion", - "Selector": "$.Payload.resource_region", - "Type": "String", + "Name": "MaxPasswordAge", + "Selector": "$.Payload.MaxPasswordAge", + "Type": "Integer", + }, + { + "Name": "MinimumPasswordLength", + "Selector": "$.Payload.MinimumPasswordLength", + "Type": "Integer", + }, + { + "Name": "RequireSymbols", + "Selector": "$.Payload.RequireSymbols", + "Type": "Boolean", + }, + { + "Name": "RequireNumbers", + "Selector": "$.Payload.RequireNumbers", + "Type": "Boolean", + }, + { + "Name": "RequireUppercaseCharacters", + "Selector": "$.Payload.RequireUppercaseCharacters", + "Type": "Boolean", + }, + { + "Name": 
"RequireLowercaseCharacters", + "Selector": "$.Payload.RequireLowercaseCharacters", + "Type": "Boolean", + }, + { + "Name": "PasswordReusePrevention", + "Selector": "$.Payload.PasswordReusePrevention", + "Type": "Integer", }, ], }, { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-RemoveVPCDefaultSecurityGroupRules", + "DocumentName": "ASR-SetIAMPasswordPolicy", "RuntimeParameters": { + "AllowUsersToChangePassword": "{{ GetInputParams.AllowUsersToChangePassword }}", "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "GroupId": "{{ ParseInput.GroupId }}", + "HardExpiry": "{{ GetInputParams.HardExpiry }}", + "MaxPasswordAge": "{{ GetInputParams.MaxPasswordAge }}", + "MinimumPasswordLength": "{{ GetInputParams.MinimumPasswordLength }}", + "PasswordReusePrevention": "{{ GetInputParams.PasswordReusePrevention }}", + "RequireLowercaseCharacters": "{{ GetInputParams.RequireLowercaseCharacters }}", + "RequireNumbers": "{{ GetInputParams.RequireNumbers }}", + "RequireSymbols": "{{ GetInputParams.RequireSymbols }}", + "RequireUppercaseCharacters": "{{ GetInputParams.RequireUppercaseCharacters }}", }, - "TargetLocations": [ - { - "Accounts": [ - "{{ ParseInput.RemediationAccount }}", - ], - "ExecutionRoleName": "{{ RemediationRoleName }}", - "Regions": [ - "{{ ParseInput.RemediationRegion }}", - ], - }, - ], }, "name": "Remediation", }, @@ -6668,8 +14100,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Removed rules on default security group", - "UpdatedBy": "ASR-PCI_3.2.1_EC2.2", + "Text": "Established a baseline password policy using the ASR-SetIAMPasswordPolicy runbook.", + "UpdatedBy": "ASR-PCI_3.2.1_IAM.7", }, "Service": "securityhub", "Workflow": { @@ -6691,12 +14123,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the EC2.2 finding", + "description": "The input from the Orchestrator 
Step function for the IAM.7 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-RemoveVPCDefaultSecurityGroupRules", + "default": "SO0111-SetIAMPasswordPolicy", "type": "String", }, }, @@ -6704,7 +14136,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_EC2.2", + "Name": "ASR-PCI_3.2.1_IAM.7", "Tags": [ { "Key": "CdkGenerated", @@ -6715,28 +14147,30 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksEC267E3087AE": { - "Condition": "ControlRunbooksEnableEC26ConditionF1F880B0", + "ControlRunbooksIAM8632E03ED": { + "Condition": "ControlRunbooksEnableIAM8Condition9CA5CB4B", "DependsOn": [ - "CreateWait2", + "CreateWait5", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.6 + "description": "### Document Name - ASR-AFSBP_1.0.0_IAM.8 ## What does this document do? -Enables VPC Flow Logs for a VPC +This document ensures that credentials unused for 90 days or greater are disabled. ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
## Output Parameters -* Remediation.Output - Remediation results +* Remediation.Output - Output of remediation runbook + +SEE AWSConfigRemediation-RevokeUnusedIAMUserCredentials ## Documentation Links -* [AFSBP EC2.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-6) +* [AWS FSBP IAM.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-8) ", "mainSteps": [ { @@ -6746,113 +14180,116 @@ Enables VPC Flow Logs for a VPC "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "EC2.6", + "IAM.8", ], - "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):ec2:.*:\\d{12}:vpc\\/(vpc-[0-9a-f]{8,17})$", + "parse_id_pattern": "", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting 
solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id 
{identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -6862,101 +14299,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = 
self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and 
self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. 
- self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 
1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -6978,18 +14456,13 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "VPC", - "Selector": "$.Payload.resource_id", - "Type": "String", - }, - { - "Name": "RemediationAccount", - "Selector": "$.Payload.account_id", - "Type": "String", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", }, { - "Name": "RemediationRegion", - "Selector": "$.Payload.resource_region", + "Name": "IAMResourceId", + "Selector": "$.Payload.details.AwsIamUser.UserId", "Type": "String", }, ], @@ -6997,23 +14470,12 @@ def parse_event(event, _): { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-EnableVPCFlowLogs", + "DocumentName": "ASR-RevokeUnusedIAMUserCredentials", "RuntimeParameters": { 
"AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "RemediationRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/SO0111-EnableVPCFlowLogs-remediationRole", - "VPC": "{{ ParseInput.VPC }}", + "IAMResourceId": "{{ ParseInput.IAMResourceId }}", + "MaxCredentialUsageAge": "90", }, - "TargetLocations": [ - { - "Accounts": [ - "{{ ParseInput.RemediationAccount }}", - ], - "ExecutionRoleName": "{{ RemediationRoleName }}", - "Regions": [ - "{{ ParseInput.RemediationRegion }}", - ], - }, - ], }, "name": "Remediation", }, @@ -7028,8 +14490,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Removed rules on default security group", - "UpdatedBy": "ASR-PCI_3.2.1_EC2.6", + "Text": "Deactivated unused keys and expired logins using the ASR-RevokeUnusedIAMUserCredentials runbook.", + "UpdatedBy": "ASR-PCI_3.2.1_IAM.8", }, "Service": "securityhub", "Workflow": { @@ -7051,12 +14513,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the EC2.6 finding", + "description": "The input from the Orchestrator Step function for the IAM.8 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-EnableVPCFlowLogs", + "default": "SO0111-RevokeUnusedIAMUserCredentials", "type": "String", }, }, @@ -7064,7 +14526,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_EC2.6", + "Name": "ASR-PCI_3.2.1_IAM.8", "Tags": [ { "Key": "CdkGenerated", @@ -7075,25 +14537,29 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksEC277719A4CD": { - "Condition": "ControlRunbooksEnableEC27ConditionC77CF056", + "ControlRunbooksKMS41A22BB8D": { + "Condition": "ControlRunbooksEnableKMS4Condition710C0C5C", "DependsOn": [ - "CreateWait2", + "CreateWait6", ], "Properties": { "Content": { 
"assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_EC2.7 + "description": "### Document Name - ASR-CIS_1.2.0_2.8 + ## What does this document do? -This document enables \`EBS Encryption by default\` for an AWS account in the current region by calling another SSM document +Enables rotation for customer-managed KMS keys. + ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + ## Output Parameters -* Remediation.Output +* Remediation.Output - Remediation results ## Documentation Links -* [AFSBP EC2.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-ec2-7) +* [CIS v1.2.0 2.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-2.8) +* [PCI v3.2.1 PCI.KMS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-pci-controls.html#pcidss-kms-1) ", "mainSteps": [ { @@ -7103,113 +14569,116 @@ This document enables \`EBS Encryption by default\` for an AWS account in the cu "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "EC2.7", + "KMS.4", ], - "parse_id_pattern": "", + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:key\\/([A-Za-z0-9-]{36})$", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -7219,101 +14688,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -7334,15 +14844,47 @@ def parse_event(event, _): "Selector": "$.Payload.object", "Type": "StringMap", }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "KeyId", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { 
+ "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, ], }, { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-EnableEbsEncryptionByDefault", + "DocumentName": "ASR-EnableKeyRotation", "RuntimeParameters": { "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "KeyId": "{{ ParseInput.KeyId }}", }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], }, "name": "Remediation", }, @@ -7357,8 +14899,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Enabled EBS encryption by default", - "UpdatedBy": "ASR-PCI_3.2.1_EC2.7", + "Text": "Enabled KMS Customer Managed Key rotation for {{ ParseInput.KeyId }}", + "UpdatedBy": "ASR-PCI_3.2.1_KMS.4", }, "Service": "securityhub", "Workflow": { @@ -7380,12 +14922,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the EC2.7 finding", + "description": "The input from the Orchestrator Step function for the KMS.4 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-EnableEbsEncryptionByDefault", + "default": "SO0111-EnableKeyRotation", "type": "String", }, }, @@ -7393,7 +14935,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_EC2.7", + "Name": "ASR-PCI_3.2.1_KMS.4", "Tags": [ { "Key": "CdkGenerated", @@ -7404,28 +14946,27 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksIAM18ACE62321": { - "Condition": "ControlRunbooksEnableIAM18ConditionC6288150", + "ControlRunbooksLambda1F6ECACF8": { + "Condition": 
"ControlRunbooksEnableLambda1Condition077CECAF", "DependsOn": [ - "CreateWait4", + "CreateWait6", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-CIS_1.2.0_1.20 + "description": "### Document Name - ASR-AFSBP_1.0.0_Lambda.1 ## What does this document do? -Creates a support role to allow AWS Support access. +This document removes the public resource policy. A public resource policy +contains a principal "*" or AWS: "*", which allows public access to the +function. The remediation is to remove the SID of the public policy. ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. -## Output Parameters -* Remediation.Output - Output of CreateRole API. - ## Documentation Links -* [CIS v1.2.0 1.20](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-1.20) +* [AWS FSBP Lambda.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-lambda-1) ", "mainSteps": [ { @@ -7435,113 +14976,116 @@ Creates a support role to allow AWS Support access. "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "IAM.18", + "Lambda.1", ], - "parse_id_pattern": "", + "parse_id_pattern": "^arn:(?:aws|aws-us-gov|aws-cn):lambda:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:function:([a-zA-Z0-9\\-_]{1,64})$", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -7551,101 +15095,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -7666,15 +15251,47 @@ def parse_event(event, _): "Selector": "$.Payload.object", "Type": "StringMap", }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "FunctionName", + "Selector": "$.Payload.resource_id", + "Type": "String", + 
}, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, ], }, { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-CreateIAMSupportRole", + "DocumentName": "ASR-RemoveLambdaPublicAccess", "RuntimeParameters": { "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", + "FunctionName": "{{ ParseInput.FunctionName }}", }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], }, "name": "Remediation", }, @@ -7689,8 +15306,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Create an IAM role to allow authorized users to manage incidents with AWS Support using the ASR-CreateIAMSupportRole runbook.", - "UpdatedBy": "ASR-PCI_3.2.1_IAM.18", + "Text": "Lamdba {{ ParseInput.FunctionName }} policy updated to remove public access", + "UpdatedBy": "ASR-PCI_3.2.1_Lambda.1", }, "Service": "securityhub", "Workflow": { @@ -7712,12 +15329,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the IAM.18 finding", + "description": "The input from the Orchestrator Step function for the Lambda.1 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-CreateIAMSupportRole", + "default": "SO0111-RemoveLambdaPublicAccess", "type": "String", }, }, @@ -7725,7 +15342,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_IAM.18", + "Name": "ASR-PCI_3.2.1_Lambda.1", "Tags": [ { "Key": "CdkGenerated", @@ -7736,30 +15353,28 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - 
"ControlRunbooksIAM2280FCB95D": { - "Condition": "ControlRunbooksEnableIAM22Condition387158E7", + "ControlRunbooksRDS13FCEA51BD": { + "Condition": "ControlRunbooksEnableRDS13Condition0E8A44B3", "DependsOn": [ - "CreateWait4", + "CreateWait8", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_IAM.8 + "description": "### Document Name - ASR-AFSBP_1.0.0_RDS.13 ## What does this document do? -This document ensures that credentials unused for 90 days or greater are disabled. +This document enables \`Auto minor version upgrade\` on a given Amazon RDS instance by calling another SSM document. ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. ## Output Parameters -* Remediation.Output - Output of remediation runbook - -SEE AWSConfigRemediation-RevokeUnusedIAMUserCredentials +* Remediation.Output - The standard HTTP response from the ModifyDBInstance API. ## Documentation Links -* [AFSBP IAM.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-8) +* [AWS FSBP RDS.13](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-13) ", "mainSteps": [ { @@ -7769,113 +15384,116 @@ SEE AWSConfigRemediation-RevokeUnusedIAMUserCredentials "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "IAM.22", + "RDS.13", ], "parse_id_pattern": "", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -7885,101 +15503,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -8001,8 +15660,23 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "IAMResourceId", - "Selector": "$.Payload.details.AwsIamUser.UserId", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "RemediationAccount", + 
"Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + { + "Name": "DBInstanceIdentifier", + "Selector": "$.Payload.resource.Details.AwsRdsDbInstance.DBInstanceIdentifier", "Type": "String", }, ], @@ -8010,12 +15684,22 @@ def parse_event(event, _): { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-RevokeUnusedIAMUserCredentials", + "DocumentName": "ASR-EnableMinorVersionUpgradeOnRDSDBInstance", "RuntimeParameters": { "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "IAMResourceId": "{{ ParseInput.IAMResourceId }}", - "MaxCredentialUsageAge": "45", + "DBInstanceIdentifier": "{{ ParseInput.DBInstanceIdentifier }}", }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], }, "name": "Remediation", }, @@ -8030,8 +15714,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Deactivated unused keys and expired logins using the ASR-RevokeUnusedIAMUserCredentials runbook.", - "UpdatedBy": "ASR-PCI_3.2.1_IAM.22", + "Text": "Minor Version enabled on the RDS Instance or Multi-AZ RDS Cluster.", + "UpdatedBy": "ASR-PCI_3.2.1_RDS.13", }, "Service": "securityhub", "Workflow": { @@ -8053,12 +15737,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the IAM.22 finding", + "description": "The input from the Orchestrator Step function for the RDS.13 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-RevokeUnusedIAMUserCredentials", + "default": "SO0111-EnableMinorVersionUpgradeOnRDSDBInstance", "type": "String", }, }, @@ -8066,7 +15750,7 @@ def parse_event(event, _): }, 
"DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_IAM.22", + "Name": "ASR-PCI_3.2.1_RDS.13", "Tags": [ { "Key": "CdkGenerated", @@ -8077,28 +15761,28 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksIAM3DC25477E": { - "Condition": "ControlRunbooksEnableIAM3Condition3AA0E892", + "ControlRunbooksRDS16EB04DCBF": { + "Condition": "ControlRunbooksEnableRDS16ConditionCB5C3E8F", "DependsOn": [ - "CreateWait3", + "CreateWait8", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_IAM.3 + "description": "### Document Name - ASR-AFSBP_1.0.0_RDS.16 ## What does this document do? -This document disables active keys that have not been rotated for more than 90 days. Note that this remediation is **DISRUPTIVE**. +This document enables \`Copy tags to snapshots\` on a given Amazon RDS cluster by calling another SSM document. ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. ## Output Parameters -* Remediation.Output +* Remediation.Output - The standard HTTP response from the ModifyDBCluster API. 
## Documentation Links -* [AFSBP v1.0.0 IAM.3](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-3) +* [AWS FSBP RDS.16](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-16) ", "mainSteps": [ { @@ -8108,113 +15792,116 @@ This document disables active keys that have not been rotated for more than 90 d "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "IAM.3", + "RDS.16", ], - "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):iam::\\d{12}:user(?:(?:\\u002F)|(?:\\u002F[\\u0021-\\u007F]{1,510}\\u002F))([\\w+=,.@-]{1,64})$", + "parse_id_pattern": "", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return 
solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id 
{identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -8224,101 +15911,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + 
"RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. 
- self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 
1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -8340,13 +16068,23 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "IAMUser", - "Selector": "$.Payload.resource_id", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", "Type": "String", }, { - "Name": "IAMResourceId", - "Selector": "$.Payload.details.AwsIamUser.UserId", + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + { + "Name": "DbiResourceId", + "Selector": "$.Payload.details.AwsRdsDbCluster.DbClusterResourceId", "Type": "String", }, ], @@ -8354,12 +16092,23 @@ def parse_event(event, _): { "action": "aws:executeAutomation", "inputs": { - "DocumentName": 
"ASR-RevokeUnrotatedKeys", + "DocumentName": "ASR-EnableCopyTagsToSnapshotOnRDSCluster", "RuntimeParameters": { + "ApplyImmediately": true, "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "IAMResourceId": "{{ ParseInput.IAMResourceId }}", - "MaxCredentialUsageAge": "{{ MaxCredentialUsageAge }}", + "DbClusterResourceId": "{{ ParseInput.DbiResourceId }}", }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], }, "name": "Remediation", }, @@ -8374,8 +16123,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Deactivated unrotated keys for {{ ParseInput.IAMUser }}.", - "UpdatedBy": "ASR-PCI_3.2.1_IAM.3", + "Text": "Copy Tags to Snapshots enabled on RDS DB cluster", + "UpdatedBy": "ASR-PCI_3.2.1_RDS.16", }, "Service": "securityhub", "Workflow": { @@ -8396,19 +16145,13 @@ def parse_event(event, _): "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", "type": "String", }, - "Finding": { - "description": "The input from the Orchestrator Step function for the IAM.3 finding", - "type": "StringMap", - }, - "MaxCredentialUsageAge": { - "allowedPattern": "^(?:[1-9]\\d{0,3}|10000)$", - "default": "90", - "description": "(Required) Maximum number of days a key can be unrotated. 
The default value is 90 days.", - "type": "String", + "Finding": { + "description": "The input from the Orchestrator Step function for the RDS.16 finding", + "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-RevokeUnrotatedKeys", + "default": "SO0111-EnableCopyTagsToSnapshotOnRDSCluster", "type": "String", }, }, @@ -8416,7 +16159,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_IAM.3", + "Name": "ASR-PCI_3.2.1_RDS.16", "Tags": [ { "Key": "CdkGenerated", @@ -8427,30 +16170,24 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksIAM70A808F7C": { - "Condition": "ControlRunbooksEnableIAM7ConditionDF8E776B", + "ControlRunbooksRDS1D73701E9": { + "Condition": "ControlRunbooksEnableRDS1ConditionFAE5B7EA", "DependsOn": [ - "CreateWait3", + "CreateWait6", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_IAM.7 - + "description": "### Document Name - ASR-AFSBP_1.0.0_RDS.1 ## What does this document do? -This document establishes a default password policy. - -## Security Standards and Controls -* AFSBP IAM.7 +This document changes public RDS snapshot to private ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. -## Output Parameters -* Remediation.Output ## Documentation Links -* [AFSBP IAM.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-7) +* [AWS FSBP RDS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-1) ", "mainSteps": [ { @@ -8460,120 +16197,117 @@ This document establishes a default password policy. 
"InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "IAM.7", - "IAM.11", - "IAM.12", - "IAM.13", - "IAM.14", - "IAM.15", - "IAM.16", - "IAM.17", + "RDS.1", ], - "parse_id_pattern": "", + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:(cluster-snapshot|snapshot):([a-zA-Z][0-9a-zA-Z]*(?:-[0-9a-zA-Z]+)*)$", + "resource_index": 2, }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + 
"cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - 
self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -8583,101 +16317,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] 
self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not 
re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. 
- self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 
1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -8698,24 +16473,53 @@ def parse_event(event, _): "Selector": "$.Payload.object", "Type": "StringMap", }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "DBSnapshotId", + "Selector": "$.Payload.resource_id", + "Type": "String", + }, + { + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + { + "Name": "DBSnapshotType", + "Selector": "$.Payload.matches[0]", + "Type": "String", + }, ], }, { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-SetIAMPasswordPolicy", + "DocumentName": "ASR-MakeRDSSnapshotPrivate", 
"RuntimeParameters": { - "AllowUsersToChangePassword": true, "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "HardExpiry": true, - "MaxPasswordAge": 90, - "MinimumPasswordLength": 14, - "PasswordReusePrevention": 24, - "RequireLowercaseCharacters": true, - "RequireNumbers": true, - "RequireSymbols": true, - "RequireUppercaseCharacters": true, + "DBSnapshotId": "{{ ParseInput.DBSnapshotId }}", + "DBSnapshotType": "{{ ParseInput.DBSnapshotType }}", }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], }, "name": "Remediation", }, @@ -8730,8 +16534,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Established a baseline password policy using the ASR-SetIAMPasswordPolicy runbook.", - "UpdatedBy": "ASR-PCI_3.2.1_IAM.7", + "Text": "RDS DB Snapshot modified to private", + "UpdatedBy": "ASR-PCI_3.2.1_RDS.1", }, "Service": "securityhub", "Workflow": { @@ -8753,12 +16557,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the IAM.7 finding", + "description": "The input from the Orchestrator Step function for the RDS.1 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-SetIAMPasswordPolicy", + "default": "SO0111-MakeRDSSnapshotPrivate", "type": "String", }, }, @@ -8766,7 +16570,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_IAM.7", + "Name": "ASR-PCI_3.2.1_RDS.1", "Tags": [ { "Key": "CdkGenerated", @@ -8777,30 +16581,28 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksIAM8632E03ED": { - "Condition": "ControlRunbooksEnableIAM8Condition9CA5CB4B", + "ControlRunbooksRDS2FBE04686": { + 
"Condition": "ControlRunbooksEnableRDS2Condition4FD00FE6", "DependsOn": [ - "CreateWait3", + "CreateWait7", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_IAM.8 - + "description": "### Document Name - ASR-AFSBP_1.0.0_RDS.2 ## What does this document do? -This document ensures that credentials unused for 90 days or greater are disabled. +This document disables public access to RDS instances by calling another SSM document ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. -## Output Parameters -* Remediation.Output - Output of remediation runbook - -SEE AWSConfigRemediation-RevokeUnusedIAMUserCredentials - ## Documentation Links -* [AFSBP IAM.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-iam-8) +* [AWS FSBP RDS.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-2) + +## Troubleshooting +* ModifyDBInstance isn't supported for a DB instance in a Multi-AZ DB Cluster. + - This remediation will not work on an instance within a MySQL or PostgreSQL Multi-AZ Cluster due to limitations with the RDS API. ", "mainSteps": [ { @@ -8810,113 +16612,116 @@ SEE AWSConfigRemediation-RevokeUnusedIAMUserCredentials "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "IAM.8", + "RDS.2", ], - "parse_id_pattern": "", + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:db:((?!.*--.*)(?!.*-$)[a-z][a-z0-9-]{0,62})$", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -8926,101 +16731,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -9042,8 +16888,23 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "IAMResourceId", - "Selector": "$.Payload.details.AwsIamUser.UserId", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "RemediationAccount", + 
"Selector": "$.Payload.account_id", + "Type": "String", + }, + { + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + { + "Name": "DbiResourceId", + "Selector": "$.Payload.resource.Details.AwsRdsDbInstance.DbiResourceId", "Type": "String", }, ], @@ -9051,12 +16912,22 @@ def parse_event(event, _): { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-RevokeUnusedIAMUserCredentials", + "DocumentName": "ASR-DisablePublicAccessToRDSInstance", "RuntimeParameters": { "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "IAMResourceId": "{{ ParseInput.IAMResourceId }}", - "MaxCredentialUsageAge": "90", + "DbiResourceId": "{{ ParseInput.DbiResourceId }}", }, + "TargetLocations": [ + { + "Accounts": [ + "{{ ParseInput.RemediationAccount }}", + ], + "ExecutionRoleName": "{{ RemediationRoleName }}", + "Regions": [ + "{{ ParseInput.RemediationRegion }}", + ], + }, + ], }, "name": "Remediation", }, @@ -9071,8 +16942,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Deactivated unused keys and expired logins using the ASR-RevokeUnusedIAMUserCredentials runbook.", - "UpdatedBy": "ASR-PCI_3.2.1_IAM.8", + "Text": "Disabled public access to RDS instance", + "UpdatedBy": "ASR-PCI_3.2.1_RDS.2", }, "Service": "securityhub", "Workflow": { @@ -9094,12 +16965,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the IAM.8 finding", + "description": "The input from the Orchestrator Step function for the RDS.2 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-RevokeUnusedIAMUserCredentials", + "default": "SO0111-DisablePublicAccessToRDSInstance", "type": "String", }, }, @@ -9107,7 +16978,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": 
"ASR-PCI_3.2.1_IAM.8", + "Name": "ASR-PCI_3.2.1_RDS.2", "Tags": [ { "Key": "CdkGenerated", @@ -9118,29 +16989,27 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksKMS41A22BB8D": { - "Condition": "ControlRunbooksEnableKMS4Condition710C0C5C", + "ControlRunbooksRDS4C82F2410": { + "Condition": "ControlRunbooksEnableRDS4Condition2E89346E", "DependsOn": [ - "CreateWait4", + "CreateWait7", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-CIS_1.2.0_2.8 + "description": "### Document Name - ASR-AFSBP_1.0.0_RDS.4 ## What does this document do? -Enables rotation for customer-managed KMS keys. +This document encrypts an unencrypted RDS snapshot by calling another SSM document ## Input Parameters * Finding: (Required) Security Hub finding details JSON -* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. - -## Output Parameters -* Remediation.Output - Remediation results +* AutomationAssumeRole: (Optional) The ARN of the role that allows Automation to perform the actions on your behalf. +* RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. +* KMSKeyId: (Optional) ID, ARN or Alias for the AWS KMS Customer-Managed Key (CMK) to use to encrypt the snapshot. ## Documentation Links -* [CIS v1.2.0 2.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-2.8) -* [PCI v3.2.1 PCI.KMS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-pci-controls.html#pcidss-kms-1) +* [AWS FSBP RDS.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-4) ", "mainSteps": [ { @@ -9150,113 +17019,117 @@ Enables rotation for customer-managed KMS keys. 
"InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "KMS.4", + "RDS.4", ], - "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:key\\/([A-Za-z0-9-]{36})$", + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:((?:cluster-)?snapshot|dbclustersnapshot):((?:rds:|awsbackup:)?((?!.*--.*)(?!.*-$)[a-zA-Z][a-zA-Z0-9-]{0,254}))$", + "resource_index": 2, }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + 
"aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 
'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -9266,101 +17139,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + 
self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) 
- if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. 
- self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 
1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -9382,9 +17296,9 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "KeyId", - "Selector": "$.Payload.resource_id", - "Type": "String", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", }, { "Name": "RemediationAccount", @@ -9392,8 +17306,23 @@ def parse_event(event, _): "Type": "String", }, { - "Name": "RemediationRegion", - "Selector": "$.Payload.resource_region", + "Name": "RemediationRegion", + "Selector": "$.Payload.resource_region", + "Type": "String", + }, + { + "Name": "SourceDBSnapshotIdentifier", + "Selector": "$.Payload.matches[1]", + "Type": "String", + }, + { + "Name": "SourceDBSnapshotIdentifierNoPrefix", + "Selector": "$.Payload.matches[2]", + "Type": "String", + }, + { + 
"Name": "DBSnapshotType", + "Selector": "$.Payload.matches[0]", "Type": "String", }, ], @@ -9401,10 +17330,13 @@ def parse_event(event, _): { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-EnableKeyRotation", + "DocumentName": "ASR-EncryptRDSSnapshot", "RuntimeParameters": { "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "KeyId": "{{ ParseInput.KeyId }}", + "DBSnapshotType": "{{ ParseInput.DBSnapshotType }}", + "KmsKeyId": "{{ KMSKeyId }}", + "SourceDBSnapshotIdentifier": "{{ ParseInput.SourceDBSnapshotIdentifier }}", + "TargetDBSnapshotIdentifier": "{{ ParseInput.SourceDBSnapshotIdentifierNoPrefix }}-encrypted", }, "TargetLocations": [ { @@ -9431,8 +17363,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Enabled KMS Customer Managed Key rotation for {{ ParseInput.KeyId }}", - "UpdatedBy": "ASR-PCI_3.2.1_KMS.4", + "Text": "Encrypted RDS snapshot", + "UpdatedBy": "ASR-PCI_3.2.1_RDS.4", }, "Service": "securityhub", "Workflow": { @@ -9454,12 +17386,18 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the KMS.4 finding", + "description": "The input from the Orchestrator Step function for the RDS.4 finding", "type": "StringMap", }, + "KMSKeyId": { + "allowedPattern": "^(?:arn:(?:aws|aws-us-gov|aws-cn):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:)?(?:(?:alias\\/[A-Za-z0-9/_-]+)|(?:key\\/(?:[0-9a-fA-F]{8}-(?:[0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12})))$", + "default": "alias/aws/rds", + "description": "(Optional) ID, ARN or Alias for the AWS KMS Customer-Managed Key (CMK) to use to encrypt the snapshot.", + "type": "String", + }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-EnableKeyRotation", + "default": "SO0111-EncryptRDSSnapshot", "type": "String", }, }, @@ -9467,7 +17405,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": 
"Automation", - "Name": "ASR-PCI_3.2.1_KMS.4", + "Name": "ASR-PCI_3.2.1_RDS.4", "Tags": [ { "Key": "CdkGenerated", @@ -9478,27 +17416,26 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksLambda1F6ECACF8": { - "Condition": "ControlRunbooksEnableLambda1Condition077CECAF", + "ControlRunbooksRDS5CECD9314": { + "Condition": "ControlRunbooksEnableRDS5ConditionEC2574C3", "DependsOn": [ - "CreateWait4", + "CreateWait7", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_Lambda.1 + "description": "### Document Name - ASR-AFSBP_1.0.RDS.5 ## What does this document do? -This document removes the public resource policy. A public resource policy -contains a principal "*" or AWS: "*", which allows public access to the -function. The remediation is to remove the SID of the public policy. +This document configures an RDS DB instance for multiple Availability Zones by calling another SSM document. ## Input Parameters * Finding: (Required) Security Hub finding details JSON -* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +* AutomationAssumeRole: (Optional) The ARN of the role that allows Automation to perform the actions on your behalf. +* RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. ## Documentation Links -* [AFSBP Lambda.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-lambda-1) +* [AWS FSBP RDS.5](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-5) ", "mainSteps": [ { @@ -9508,113 +17445,116 @@ function. The remediation is to remove the SID of the public policy. 
"InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "Lambda.1", + "RDS.5", ], - "parse_id_pattern": "^arn:(?:aws|aws-us-gov|aws-cn):lambda:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:function:([a-zA-Z0-9\\-_]{1,64})$", + "parse_id_pattern": "", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): 
boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + 
"security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -9624,101 +17564,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): 
check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: 
{self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match 
{str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": 
finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -9740,9 +17721,9 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "FunctionName", - "Selector": "$.Payload.resource_id", - "Type": "String", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", }, { "Name": "RemediationAccount", @@ -9754,15 +17735,21 @@ def parse_event(event, _): "Selector": "$.Payload.resource_region", "Type": "String", }, + { + "Name": "DbiResourceId", + "Selector": "$.Payload.resource.Details.AwsRdsDbInstance.DbiResourceId", + "Type": "String", + }, ], }, { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-RemoveLambdaPublicAccess", + "DocumentName": "ASR-EnableMultiAZOnRDSInstance", "RuntimeParameters": { + "ApplyImmediately": true, "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "FunctionName": "{{ ParseInput.FunctionName }}", + "DbiResourceId": "{{ ParseInput.DbiResourceId }}", }, "TargetLocations": [ { @@ -9789,8 +17776,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Lamdba {{ ParseInput.FunctionName }} policy updated to remove public access", - "UpdatedBy": "ASR-PCI_3.2.1_Lambda.1", + "Text": "Configured RDS cluster for multiple Availability Zones", + "UpdatedBy": "ASR-PCI_3.2.1_RDS.5", }, "Service": "securityhub", "Workflow": { @@ -9812,12 +17799,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the Lambda.1 finding", + "description": "The input from the Orchestrator Step function for the RDS.5 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-RemoveLambdaPublicAccess", + "default": "SO0111-EnableMultiAZOnRDSInstance", "type": "String", }, }, @@ -9825,7 +17812,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - 
"Name": "ASR-PCI_3.2.1_Lambda.1", + "Name": "ASR-PCI_3.2.1_RDS.5", "Tags": [ { "Key": "CdkGenerated", @@ -9836,28 +17823,28 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksRDS13FCEA51BD": { - "Condition": "ControlRunbooksEnableRDS13Condition0E8A44B3", + "ControlRunbooksRDS6082B0D6B": { + "Condition": "ControlRunbooksEnableRDS6Condition4A60A39B", "DependsOn": [ - "CreateWait6", + "CreateWait7", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_RDS.13 + "description": "### Document Name - ASR-AFSBP_1.0.0_RDS.6 ## What does this document do? -This document enables \`Auto minor version upgrade\` on a given Amazon RDS instance by calling another SSM document. +This document enables \`Enhanced Monitoring\` on a given Amazon RDS instance by calling another SSM document. ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. ## Output Parameters -* Remediation.Output - The standard HTTP response from the ModifyDBInstance API. - +* VerifyRemediation.Output - The standard HTTP response from the ModifyDBInstance API. ## Documentation Links -* [AFSBP RDS.13](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-13) + +* [AWS FSBP RDS.6](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-6) ", "mainSteps": [ { @@ -9867,113 +17854,116 @@ This document enables \`Auto minor version upgrade\` on a given Amazon RDS insta "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "RDS.13", + "RDS.6", ], "parse_id_pattern": "", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -9983,101 +17973,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -10098,6 +18129,11 @@ def parse_event(event, _): "Selector": "$.Payload.object", "Type": "StringMap", }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, { "Name": "RemediationAccount", "Selector": "$.Payload.account_id", @@ -10109,19 +18145,37 
@@ def parse_event(event, _): "Type": "String", }, { - "Name": "DBInstanceIdentifier", - "Selector": "$.Payload.resource.Details.AwsRdsDbInstance.DBInstanceIdentifier", + "Name": "DbiResourceId", + "Selector": "$.Payload.resource.Details.AwsRdsDbInstance.DbiResourceId", + "Type": "String", + }, + ], + }, + { + "action": "aws:executeAwsApi", + "inputs": { + "Api": "GetRole", + "RoleName": "SO0111-RDSMonitoring-remediationRole", + "Service": "iam", + }, + "name": "GetMonitoringRoleArn", + "outputs": [ + { + "Name": "Arn", + "Selector": "$.Role.Arn", "Type": "String", }, ], + "timeoutSeconds": 600, }, { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-EnableMinorVersionUpgradeOnRDSDBInstance", + "DocumentName": "ASR-EnableEnhancedMonitoringOnRDSInstance", "RuntimeParameters": { "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "DBInstanceIdentifier": "{{ ParseInput.DBInstanceIdentifier }}", + "MonitoringRoleArn": "{{ GetMonitoringRoleArn.Arn }}", + "ResourceId": "{{ ParseInput.DbiResourceId }}", }, "TargetLocations": [ { @@ -10148,8 +18202,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Minor Version enabled on the RDS Instance or Multi-AZ RDS Cluster.", - "UpdatedBy": "ASR-PCI_3.2.1_RDS.13", + "Text": "Enhanced Monitoring enabled on RDS DB cluster", + "UpdatedBy": "ASR-PCI_3.2.1_RDS.6", }, "Service": "securityhub", "Workflow": { @@ -10171,12 +18225,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the RDS.13 finding", + "description": "The input from the Orchestrator Step function for the RDS.6 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-EnableMinorVersionUpgradeOnRDSDBInstance", + "default": "SO0111-EnableEnhancedMonitoringOnRDSInstance", "type": "String", }, }, @@ -10184,7 +18238,7 @@ def parse_event(event, _): 
}, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_RDS.13", + "Name": "ASR-PCI_3.2.1_RDS.6", "Tags": [ { "Key": "CdkGenerated", @@ -10195,18 +18249,18 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksRDS16EB04DCBF": { - "Condition": "ControlRunbooksEnableRDS16ConditionCB5C3E8F", + "ControlRunbooksRDS715C0A01A": { + "Condition": "ControlRunbooksEnableRDS7ConditionE53509B0", "DependsOn": [ - "CreateWait6", + "CreateWait7", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_RDS.16 + "description": "### Document Name - ASR-AFSBP_1.0.0_RDS.7 ## What does this document do? -This document enables \`Copy tags to snapshots\` on a given Amazon RDS cluster by calling another SSM document. +This document enables \`Deletion Protection\` on a given Amazon RDS cluster by calling another SSM document. ## Input Parameters * Finding: (Required) Security Hub finding details JSON @@ -10216,7 +18270,7 @@ This document enables \`Copy tags to snapshots\` on a given Amazon RDS cluster b * Remediation.Output - The standard HTTP response from the ModifyDBCluster API. ## Documentation Links -* [AFSBP RDS.16](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-16) +* [AWS FSBP RDS.7](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-7) ", "mainSteps": [ { @@ -10226,113 +18280,116 @@ This document enables \`Copy tags to snapshots\` on a given Amazon RDS cluster b "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "RDS.16", + "RDS.7", ], "parse_id_pattern": "", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -10342,101 +18399,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -10457,6 +18555,11 @@ def parse_event(event, _): "Selector": "$.Payload.object", "Type": "StringMap", }, + { + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, { "Name": "RemediationAccount", "Selector": "$.Payload.account_id", @@ -10477,11 +18580,10 
@@ def parse_event(event, _): { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-EnableCopyTagsToSnapshotOnRDSCluster", + "DocumentName": "ASR-EnableRDSClusterDeletionProtection", "RuntimeParameters": { - "ApplyImmediately": true, "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "DbClusterResourceId": "{{ ParseInput.DbiResourceId }}", + "ClusterId": "{{ ParseInput.DbiResourceId }}", }, "TargetLocations": [ { @@ -10508,8 +18610,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Copy Tags to Snapshots enabled on RDS DB cluster", - "UpdatedBy": "ASR-PCI_3.2.1_RDS.16", + "Text": "Deletion protection enabled on RDS DB cluster", + "UpdatedBy": "ASR-PCI_3.2.1_RDS.7", }, "Service": "securityhub", "Workflow": { @@ -10531,12 +18633,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the RDS.16 finding", + "description": "The input from the Orchestrator Step function for the RDS.7 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-EnableCopyTagsToSnapshotOnRDSCluster", + "default": "SO0111-EnableRDSClusterDeletionProtection", "type": "String", }, }, @@ -10544,7 +18646,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_RDS.16", + "Name": "ASR-PCI_3.2.1_RDS.7", "Tags": [ { "Key": "CdkGenerated", @@ -10555,24 +18657,26 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksRDS1D73701E9": { - "Condition": "ControlRunbooksEnableRDS1ConditionFAE5B7EA", + "ControlRunbooksRDS89256480A": { + "Condition": "ControlRunbooksEnableRDS8Condition8F460AB5", "DependsOn": [ - "CreateWait4", + "CreateWait8", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_RDS.1 + "description": 
"### Document Name - ASR-AFSBP_1.0.RDS.8 + ## What does this document do? -This document changes public RDS snapshot to private +This document enables \`Deletion Protection\` on a given Amazon RDS cluster by calling another SSM document. ## Input Parameters * Finding: (Required) Security Hub finding details JSON -* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +* AutomationAssumeRole: (Optional) The ARN of the role that allows Automation to perform the actions on your behalf. +* RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. ## Documentation Links -* [AFSBP RDS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-1) +* [AWS FSBP RDS.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-8) ", "mainSteps": [ { @@ -10582,114 +18686,116 @@ This document changes public RDS snapshot to private "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "RDS.1", + "RDS.8", ], - "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:(cluster-snapshot|snapshot):([a-zA-Z](?:[0-9a-zA-Z]+-)*[0-9a-zA-Z]+)$", - "resource_index": 2, + "parse_id_pattern": "", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -10699,101 +18805,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -10815,9 +18962,9 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "DBSnapshotId", - "Selector": "$.Payload.resource_id", - "Type": "String", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", }, { "Name": "RemediationAccount", @@ 
-10830,8 +18977,8 @@ def parse_event(event, _): "Type": "String", }, { - "Name": "DBSnapshotType", - "Selector": "$.Payload.matches[0]", + "Name": "DbiResourceId", + "Selector": "$.Payload.resource.Details.AwsRdsDbInstance.DbiResourceId", "Type": "String", }, ], @@ -10839,11 +18986,11 @@ def parse_event(event, _): { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-MakeRDSSnapshotPrivate", + "DocumentName": "ASR-EnableRDSInstanceDeletionProtection", "RuntimeParameters": { + "ApplyImmediately": true, "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "DBSnapshotId": "{{ ParseInput.DBSnapshotId }}", - "DBSnapshotType": "{{ ParseInput.DBSnapshotType }}", + "DbInstanceResourceId": "{{ ParseInput.DbiResourceId }}", }, "TargetLocations": [ { @@ -10870,8 +19017,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "RDS DB Snapshot modified to private", - "UpdatedBy": "ASR-PCI_3.2.1_RDS.1", + "Text": "Enabled deletion protection on RDS instance", + "UpdatedBy": "ASR-PCI_3.2.1_RDS.8", }, "Service": "securityhub", "Workflow": { @@ -10893,12 +19040,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the RDS.1 finding", + "description": "The input from the Orchestrator Step function for the RDS.8 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-MakeRDSSnapshotPrivate", + "default": "SO0111-EnableRDSInstanceDeletionProtection", "type": "String", }, }, @@ -10906,7 +19053,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_RDS.1", + "Name": "ASR-PCI_3.2.1_RDS.8", "Tags": [ { "Key": "CdkGenerated", @@ -10917,28 +19064,26 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksRDS2FBE04686": { - "Condition": "ControlRunbooksEnableRDS2Condition4FD00FE6", 
+ "ControlRunbooksRedshift1789871EB": { + "Condition": "ControlRunbooksEnableRedshift1Condition3449D560", "DependsOn": [ - "CreateWait5", + "CreateWait8", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_RDS.2 + "description": "### Document Name - ASR-AFSBP_1.0.0_Redshift.1 + ## What does this document do? -This document disables public access to RDS instances by calling another SSM document +This document disables public access to a Redshift cluster by calling another SSM document ## Input Parameters * Finding: (Required) Security Hub finding details JSON -* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +* AutomationAssumeRole: (Optional) The ARN of the role that allows Automation to perform the actions on your behalf. +* RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. ## Documentation Links -* [AFSBP RDS.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-2) - -## Troubleshooting -* ModifyDBInstance isn't supported for a DB instance in a Multi-AZ DB Cluster. - - This remediation will not work on an instance within a MySQL or PostgreSQL Multi-AZ Cluster due to limitations with the RDS API. 
+* [AWS FSBP Redshift.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-1) ", "mainSteps": [ { @@ -10948,113 +19093,116 @@ This document disables public access to RDS instances by calling another SSM doc "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "RDS.2", + "Redshift.1", ], - "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:db:((?!.*--.*)(?!.*-$)[a-z][a-z0-9-]{0,62})$", + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):redshift:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:cluster:(?!.*--)([a-z][a-z0-9-]{0,62})(? 1: + return param.split(",") + return param + + +def get_input_params(event, _): + security_hub_input_params = event["SecHubInputParams"] + + default_params = event["DefaultParams"] + + input_params = {} + + for param in default_params: + if param in security_hub_input_params: + converted_param = parse_non_string_types(security_hub_input_params[param]) + input_params[param] = converted_param + else: + converted_param = parse_non_string_types(default_params[param]) + input_params[param] = converted_param + + return input_params +", + }, + "name": "GetInputParams", + "outputs": [ { - "Name": "DbiResourceId", - "Selector": "$.Payload.details.AwsRdsDbCluster.DbClusterResourceId", - "Type": "String", + "Name": "eventTypes", + "Selector": "$.Payload.eventTypes", + "Type": "StringList", }, ], }, { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-EnableRDSClusterDeletionProtection", + "DocumentName": "ASR-EnableBucketEventNotifications", "RuntimeParameters": { + "AccountId": "{{ ParseInput.RemediationAccount }}", "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "ClusterId": "{{ ParseInput.DbiResourceId }}", + "BucketName": "{{ ParseInput.BucketName }}", + "EventTypes": "{{ GetInputParams.eventTypes }}", }, - "TargetLocations": [ - { - 
"Accounts": [ - "{{ ParseInput.RemediationAccount }}", - ], - "ExecutionRoleName": "{{ RemediationRoleName }}", - "Regions": [ - "{{ ParseInput.RemediationRegion }}", - ], - }, - ], }, "name": "Remediation", }, @@ -12701,8 +21293,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Deletion protection enabled on RDS DB cluster", - "UpdatedBy": "ASR-PCI_3.2.1_RDS.7", + "Text": "Configured event notifications to an S3 Bucket.", + "UpdatedBy": "ASR-PCI_3.2.1_S3.11", }, "Service": "securityhub", "Workflow": { @@ -12724,12 +21316,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the RDS.7 finding", + "description": "The input from the Orchestrator Step function for the S3.11 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-EnableRDSClusterDeletionProtection", + "default": "SO0111-EnableBucketEventNotifications", "type": "String", }, }, @@ -12737,7 +21329,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_RDS.7", + "Name": "ASR-PCI_3.2.1_S3.11", "Tags": [ { "Key": "CdkGenerated", @@ -12748,26 +21340,28 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksRDS89256480A": { - "Condition": "ControlRunbooksEnableRDS8Condition8F460AB5", + "ControlRunbooksS311C5AAD45": { + "Condition": "ControlRunbooksEnableS31Condition25C33B3F", "DependsOn": [ - "CreateWait6", + "CreateWait9", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.RDS.8 + "description": "### Document Name - ASR-AFSBP_1.0.0_S3.1 ## What does this document do? -This document enables \`Deletion Protection\` on a given Amazon RDS cluster by calling another SSM document. +This document blocks public access to all buckets by default at the account level. 
## Input Parameters * Finding: (Required) Security Hub finding details JSON -* AutomationAssumeRole: (Optional) The ARN of the role that allows Automation to perform the actions on your behalf. -* RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output ## Documentation Links -* [AFSBP RDS.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-rds-8) +* [AWS FSBP v1.0.0 S3.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-1) ", "mainSteps": [ { @@ -12777,113 +21371,116 @@ This document enables \`Deletion Protection\` on a given Amazon RDS cluster by c "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "RDS.8", + "S3.1", ], "parse_id_pattern": "", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -12893,101 +21490,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, 
finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing 
from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -13009,18 +21647,13 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "RemediationAccount", - "Selector": "$.Payload.account_id", - "Type": "String", - }, - { - "Name": "RemediationRegion", - "Selector": "$.Payload.resource_region", - "Type": "String", + "Name": 
"InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", }, { - "Name": "DbiResourceId", - "Selector": "$.Payload.resource.Details.AwsRdsDbInstance.DbiResourceId", + "Name": "RemediationAccount", + "Selector": "$.Payload.account_id", "Type": "String", }, ], @@ -13028,23 +21661,15 @@ def parse_event(event, _): { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-EnableRDSInstanceDeletionProtection", + "DocumentName": "ASR-ConfigureS3PublicAccessBlock", "RuntimeParameters": { - "ApplyImmediately": true, + "AccountId": "{{ ParseInput.RemediationAccount }}", "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "DbInstanceResourceId": "{{ ParseInput.DbiResourceId }}", + "BlockPublicAcls": true, + "BlockPublicPolicy": true, + "IgnorePublicAcls": true, + "RestrictPublicBuckets": true, }, - "TargetLocations": [ - { - "Accounts": [ - "{{ ParseInput.RemediationAccount }}", - ], - "ExecutionRoleName": "{{ RemediationRoleName }}", - "Regions": [ - "{{ ParseInput.RemediationRegion }}", - ], - }, - ], }, "name": "Remediation", }, @@ -13059,8 +21684,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Enabled deletion protection on RDS instance", - "UpdatedBy": "ASR-PCI_3.2.1_RDS.8", + "Text": "Configured the account to block public S3 access.", + "UpdatedBy": "ASR-PCI_3.2.1_S3.1", }, "Service": "securityhub", "Workflow": { @@ -13082,12 +21707,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the RDS.8 finding", + "description": "The input from the Orchestrator Step function for the S3.1 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-EnableRDSInstanceDeletionProtection", + "default": "SO0111-ConfigureS3PublicAccessBlock", "type": "String", }, }, @@ -13095,7 +21720,7 @@ def parse_event(event, _): }, "DocumentFormat": 
"YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_RDS.8", + "Name": "ASR-PCI_3.2.1_S3.1", "Tags": [ { "Key": "CdkGenerated", @@ -13106,26 +21731,29 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksRedshift1789871EB": { - "Condition": "ControlRunbooksEnableRedshift1Condition3449D560", + "ControlRunbooksS313756F060B": { + "Condition": "ControlRunbooksEnableS313ConditionA95162A4", "DependsOn": [ - "CreateWait6", + "CreateWait10", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_Redshift.1 + "description": "### Document Name - ASR-AFSBP_1.0.0_S3.13 ## What does this document do? -This document disables public access to a Redshift cluster by calling another SSM document +This document sets an example lifecycle policy that transfers objects greater than 10 GB to S3 Intelligent Tiering after 90 days. +It is recommended to set lifecycle policies appropriate for the objects stored in your S3 bucket. ## Input Parameters -* Finding: (Required) Security Hub finding details JSON -* AutomationAssumeRole: (Optional) The ARN of the role that allows Automation to perform the actions on your behalf. -* RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +* BucketName: (Required) Name of the S3 bucket. 
+ +## Output Parameters +* Remediation.Output ## Documentation Links -* [AFSBP Redshift.4](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-1) +* [AWS FSBP v1.0.0 S3.11](https://docs.aws.amazon.com/securityhub/latest/userguide/s3-controls.html#s3-13) ", "mainSteps": [ { @@ -13135,113 +21763,116 @@ This document disables public access to a Redshift cluster by calling another SS "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "Redshift.1", + "S3.13", ], - "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):redshift:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:cluster:(?!.*--)([a-z][a-z0-9-]{0,62})(? 1: + return param.split(",") + return param + + +def get_input_params(event, _): + security_hub_input_params = event["SecHubInputParams"] + + default_params = event["DefaultParams"] + + input_params = {} + + for param in default_params: + if param in security_hub_input_params: + converted_param = parse_non_string_types(security_hub_input_params[param]) + input_params[param] = converted_param + else: + converted_param = parse_non_string_types(default_params[param]) + input_params[param] = converted_param + + return input_params +", + }, + "name": "GetInputParams", + "outputs": [ + { + "Name": "targetTransitionDays", + "Selector": "$.Payload.targetTransitionDays", + "Type": "Integer", + }, { - "Name": "RemediationAccount", - "Selector": "$.Payload.account_id", - "Type": "String", + "Name": "targetExpirationDays", + "Selector": "$.Payload.targetExpirationDays", + "Type": "Integer", }, { - "Name": "RemediationRegion", - "Selector": "$.Payload.resource_region", + "Name": "targetTransitionStorageClass", + "Selector": "$.Payload.targetTransitionStorageClass", "Type": "String", }, ], @@ -13386,22 +22123,14 @@ def parse_event(event, _): { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-DisablePublicAccessToRedshiftCluster", + "DocumentName": "ASR-SetS3LifecyclePolicy", 
"RuntimeParameters": { "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "ClusterIdentifier": "{{ ParseInput.ClusterIdentifier }}", + "BucketName": "{{ ParseInput.BucketName }}", + "TargetExpirationDays": "{{ GetInputParams.targetExpirationDays }}", + "TargetTransitionDays": "{{ GetInputParams.targetTransitionDays }}", + "TargetTransitionStorageClass": "{{ GetInputParams.targetTransitionStorageClass }}", }, - "TargetLocations": [ - { - "Accounts": [ - "{{ ParseInput.RemediationAccount }}", - ], - "ExecutionRoleName": "{{ RemediationRoleName }}", - "Regions": [ - "{{ ParseInput.RemediationRegion }}", - ], - }, - ], }, "name": "Remediation", }, @@ -13416,8 +22145,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Disabled public access to Redshift cluster", - "UpdatedBy": "ASR-PCI_3.2.1_Redshift.1", + "Text": "Setting an example lifecycle policy on the S3 bucket.", + "UpdatedBy": "ASR-PCI_3.2.1_S3.13", }, "Service": "securityhub", "Workflow": { @@ -13439,12 +22168,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the Redshift.1 finding", + "description": "The input from the Orchestrator Step function for the S3.13 finding", "type": "StringMap", }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-DisablePublicAccessToRedshiftCluster", + "default": "SO0111-SetS3LifecyclePolicy", "type": "String", }, }, @@ -13452,7 +22181,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_Redshift.1", + "Name": "ASR-PCI_3.2.1_S3.13", "Tags": [ { "Key": "CdkGenerated", @@ -13463,26 +22192,30 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksRedshift3106C10FF": { - "Condition": "ControlRunbooksEnableRedshift3ConditionC65BAEF6", + "ControlRunbooksS3260D6E897": { + "Condition": 
"ControlRunbooksEnableS32ConditionD6F8CCE9", "DependsOn": [ - "CreateWait6", + "CreateWait9", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_Redshift.3 + "description": "### Document Name - ASR-AFSBP_1.0.0_S3.2 ## What does this document do? -This document enables automatic snapshots on a Redshift cluster by calling another SSM document +This document blocks all public access to an S3 bucket. ## Input Parameters * Finding: (Required) Security Hub finding details JSON -* AutomationAssumeRole: (Optional) The ARN of the role that allows Automation to perform the actions on your behalf. -* RemediationRoleName: (Optional) The name of the role that allows Automation to remediate the finding on your behalf. +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* Remediation.Output ## Documentation Links -* [AFSBP Redshift.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-redshift-3) +* [AWS FSBP v1.0.0 S3.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-2) +* [AWS FSBP v1.0.0 S3.3](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-3) +* [AWS FSBP v1.0.0 S3.8](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-s3-8) ", "mainSteps": [ { @@ -13492,113 +22225,118 @@ This document enables automatic snapshots on a Redshift cluster by calling anoth "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "Redshift.3", + "S3.2", + "S3.3", + "S3.8", ], - "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):redshift:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:cluster:(?!.*--)([a-z][a-z0-9-]{0,62})(? 
1: + return param.split(",") + return param + + +def get_input_params(event, _): + security_hub_input_params = event["SecHubInputParams"] + + default_params = event["DefaultParams"] + + input_params = {} + + for param in default_params: + if param in security_hub_input_params: + converted_param = parse_non_string_types(security_hub_input_params[param]) + input_params[param] = converted_param + else: + converted_param = parse_non_string_types(default_params[param]) + input_params[param] = converted_param + + return input_params +", + }, + "name": "GetInputParams", + "outputs": [ + { + "Name": "maximumAllowedRotationFrequency", + "Selector": "$.Payload.maximumAllowedRotationFrequency", + "Type": "Integer", + }, + ], + }, { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-EnableEncryptionForSNSTopic", + "DocumentName": "ASR-EnableAutoSecretRotation", "RuntimeParameters": { "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "KmsKeyArn": "{{ KmsKeyArn }}", - "SNSTopicArn": "{{ ParseInput.SNSTopicArn }}", + "MaximumAllowedRotationFrequency": "{{ GetInputParams.maximumAllowedRotationFrequency }}", + "SecretARN": "{{ ParseInput.SecretARN }}", }, "TargetLocations": [ { @@ -16797,8 +25877,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Encryption enabled on SNS Topic", - "UpdatedBy": "ASR-PCI_3.2.1_SNS.1", + "Text": "Enabled automatic rotation on secret and set schedule to 90 days.", + "UpdatedBy": "ASR-PCI_3.2.1_SecretsManager.1", }, "Service": "securityhub", "Workflow": { @@ -16820,17 +25900,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the SNS.1 finding", + "description": "The input from the Orchestrator Step function for the SecretsManager.1 finding", "type": "StringMap", }, - "KmsKeyArn": { - "allowedPattern": 
"^arn:(?:aws|aws-us-gov|aws-cn):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:(?:(?:alias\\/[A-Za-z0-9/-_])|(?:key\\/(?:[0-9a-fA-F]{8}-(?:[0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12})))$", - "default": "{{ssm:/Solutions/SO0111/CMK_REMEDIATION_ARN}}", - "type": "String", - }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-EnableEncryptionForSNSTopic", + "default": "SO0111-EnableAutoSecretRotation", "type": "String", }, }, @@ -16838,7 +25913,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_SNS.1", + "Name": "ASR-PCI_3.2.1_SecretsManager.1", "Tags": [ { "Key": "CdkGenerated", @@ -16849,28 +25924,29 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksSNS2112179CC": { - "Condition": "ControlRunbooksEnableSNS2Condition69621468", + "ControlRunbooksSecretsManager313120200": { + "Condition": "ControlRunbooksEnableSecretsManager3Condition04E1FFBB", "DependsOn": [ - "CreateWait8", + "CreateWait11", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": " ### Document Name - ASR-AFSBP_1.0.0_SNS.2 - - ## What does this document do? - This document enables logging of delivery status for notification messages sent to a topic. - - ## Input Parameters - * Finding: (Required) Security Hub finding details JSON - * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. - - ## Output Parameters - * Remediation.Output - - ## Documentation Links - * [AFSBP v1.0.0 SNS.2](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-sns-2)", + "description": "### Document Name - ASR-AFSBP_1.0.0_SecretsManager.3 + +## What does this document do? +This document deletes a secret that has been unused for the number of days specified in the unusedForDays parameter (Default: 90 days). 
+ +## Input Parameters +* Finding: (Required) Security Hub finding details JSON +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +* SecretARN: (Required) The ARN of the Secrets Manager secret. + +## Output Parameters +* Remediation.Output + +## Documentation Links +* [AFSBP v1.0.0 SecretsManager.3](https://docs.aws.amazon.com/securityhub/latest/userguide/secretsmanager-controls.html#secretsmanager-3)", "mainSteps": [ { "action": "aws:executeScript", @@ -16879,113 +25955,116 @@ def parse_event(event, _): "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "SNS.2", + "SecretsManager.3", ], "parse_id_pattern": "", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution 
version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid 
resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -16995,101 +26074,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + 
"RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 
f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. 
- self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 
1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -17111,7 +26231,12 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "SNSTopicArn", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "SecretARN", "Selector": "$.Payload.resource_id", "Type": "String", }, @@ -17127,14 +26252,72 @@ def parse_event(event, _): }, ], }, + { + "action": "aws:executeScript", + "inputs": { + "Handler": "get_input_params", + "InputPayload": { + "DefaultParams": { + "unusedForDays": 90, + }, + "SecHubInputParams": "{{ ParseInput.InputParams }}", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import re + + +def parse_non_string_types(param): + if re.match("^\\d+$", str(param)): + param = int(param) + return param + if param == "true" or param == "True": + return True + if param == "false" or param == "False": + return False + if isinstance(param, list): + return param + if len(param.split(",")) > 1: + return param.split(",") + return param + + +def get_input_params(event, _): + security_hub_input_params = event["SecHubInputParams"] + + default_params = event["DefaultParams"] + + input_params = {} + + for param in default_params: + if param in security_hub_input_params: + converted_param = parse_non_string_types(security_hub_input_params[param]) + input_params[param] = converted_param + else: + converted_param = parse_non_string_types(default_params[param]) + input_params[param] = converted_param + + return input_params +", + }, + "name": "GetInputParams", + "outputs": [ + { + "Name": "UnusedForDays", + "Selector": "$.Payload.unusedForDays", + "Type": "StringList", + }, + ], + }, { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-EnableDeliveryLoggingForSNSTopic", + "DocumentName": "ASR-RemoveUnusedSecret", "RuntimeParameters": { "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "LoggingRole": "{{ LoggingRole }}", - "SNSTopicArn": "{{ ParseInput.SNSTopicArn }}", + "SecretARN": "{{ ParseInput.SecretARN }}", + "UnusedForDays": "{{ GetInputParams.UnusedForDays }}", }, "TargetLocations": [ { @@ -17161,8 +26344,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Delivery Status Logging enabled on SNS Topic", - "UpdatedBy": "ASR-PCI_3.2.1_SNS.2", + "Text": "Removed the unused secret.", + "UpdatedBy": "ASR-PCI_3.2.1_SecretsManager.3", }, "Service": "securityhub", "Workflow": { @@ -17184,17 +26367,12 @@ def parse_event(event, _): "type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function 
for the SNS.2 finding", + "description": "The input from the Orchestrator Step function for the SecretsManager.3 finding", "type": "StringMap", }, - "LoggingRole": { - "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role/[\\w+=,.@-]+$", - "default": "{{ssm:/Solutions/SO0111/DeliveryStatusLoggingRole}}", - "type": "String", - }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-EnableDeliveryLoggingForSNSTopic", + "default": "SO0111-RemoveUnusedSecret", "type": "String", }, }, @@ -17202,7 +26380,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_SNS.2", + "Name": "ASR-PCI_3.2.1_SecretsManager.3", "Tags": [ { "Key": "CdkGenerated", @@ -17213,29 +26391,28 @@ def parse_event(event, _): }, "Type": "AWS::SSM::Document", }, - "ControlRunbooksSQS173AA7C81": { - "Condition": "ControlRunbooksEnableSQS1Condition3065B4F2", + "ControlRunbooksSecretsManager42177BC07": { + "Condition": "ControlRunbooksEnableSecretsManager4ConditionCE71F44A", "DependsOn": [ - "CreateWait8", + "CreateWait11", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document Name - ASR-AFSBP_1.0.0_SQS.1 - + "description": "### Document Name - ASR-AFSBP_1.0.0_SecretsManager.4 + ## What does this document do? -This document enables encryption at rest using AWS KMS for SQS Queues. - +This document rotates a secret and sets its rotation period to 90 days. + ## Input Parameters * Finding: (Required) Security Hub finding details JSON * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
- + ## Output Parameters * Remediation.Output - + ## Documentation Links -* [AFSBP v1.0.0 SQS.1](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#fsbp-sqs-1) -", +* [AFSBP v1.0.0 SecretsManager.4](https://docs.aws.amazon.com/securityhub/latest/userguide/secretsmanager-controls.html#secretsmanager-4)", "mainSteps": [ { "action": "aws:executeScript", @@ -17244,113 +26421,116 @@ This document enables encryption at rest using AWS KMS for SQS Queues. "InputPayload": { "Finding": "{{ Finding }}", "expected_control_id": [ - "SQS.1", + "SecretsManager.4", ], - "parse_id_pattern": "^arn:(?:aws|aws-us-gov|aws-cn):sqs:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:([a-zA-Z0-9_-]{1,80}(?:\\.fifo)?)$", + "parse_id_pattern": "", }, "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR 
getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid 
resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:'+ - 'subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:" + + "subscription/(.*?)/v/(\\d+\\.\\d+\\.\\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -17360,101 +26540,142 @@ class FindingEvent: match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = 
self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, finding_json, parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\\d{12}$", self.account_id) and 
self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'ProductArn is invalid: {self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. 
- self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 
1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } ", }, @@ -17476,7 +26697,12 @@ def parse_event(event, _): "Type": "StringMap", }, { - "Name": "SQSQueueName", + "Name": "InputParams", + "Selector": "$.Payload.input_params", + "Type": "StringMap", + }, + { + "Name": "SecretARN", "Selector": "$.Payload.resource_id", "Type": "String", }, @@ -17492,15 +26718,72 @@ def parse_event(event, _): }, ], }, + { + "action": "aws:executeScript", + "inputs": { + "Handler": "get_input_params", + "InputPayload": { + "DefaultParams": { + "maxDaysSinceRotation": 90, + }, + "SecHubInputParams": "{{ ParseInput.InputParams }}", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import re + + +def parse_non_string_types(param): + if re.match("^\\d+$", str(param)): + param = int(param) + return param + if param == "true" or param == "True": + return True + if param == "false" or param == "False": + return False + if isinstance(param, list): + return param + if len(param.split(",")) > 1: + return param.split(",") + return param + + +def get_input_params(event, _): + security_hub_input_params = event["SecHubInputParams"] + + default_params = event["DefaultParams"] + + input_params = {} + + for param in default_params: + if param in security_hub_input_params: + converted_param = parse_non_string_types(security_hub_input_params[param]) + input_params[param] = converted_param + else: + converted_param = parse_non_string_types(default_params[param]) + input_params[param] = converted_param + + return input_params +", + }, + "name": "GetInputParams", + "outputs": [ + { + "Name": "MaxDaysSinceRotation", + "Selector": "$.Payload.maxDaysSinceRotation", + "Type": "StringList", + }, + ], + }, { "action": "aws:executeAutomation", "inputs": { - "DocumentName": "ASR-EnableEncryptionForSQSQueue", + "DocumentName": "ASR-UpdateSecretRotationPeriod", "RuntimeParameters": { "AutomationAssumeRole": "arn:{{ global:AWS_PARTITION }}:iam::{{ global:ACCOUNT_ID }}:role/{{ RemediationRoleName }}", - "KmsKeyArn": "{{ KmsKeyArn }}", - "SNSTopicArn": "{{ ParseInput.SQSQueueName }}", - "SQSQueueName": "{{ ParseInput.SQSQueueName }}", + "MaxDaysSinceRotation": "{{ GetInputParams.MaxDaysSinceRotation }}", + "SecretARN": "{{ ParseInput.SecretARN }}", }, "TargetLocations": [ { @@ -17527,8 +26810,8 @@ def parse_event(event, _): }, ], "Note": { - "Text": "Encryption enabled on SQS Topic", - "UpdatedBy": "ASR-PCI_3.2.1_SQS.1", + "Text": "Rotated secret and set rotation schedule to 90 days.", + "UpdatedBy": "ASR-PCI_3.2.1_SecretsManager.4", }, "Service": "securityhub", "Workflow": { @@ -17550,17 +26833,12 @@ def parse_event(event, _): 
"type": "String", }, "Finding": { - "description": "The input from the Orchestrator Step function for the SQS.1 finding", + "description": "The input from the Orchestrator Step function for the SecretsManager.4 finding", "type": "StringMap", }, - "KmsKeyArn": { - "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:(?:(?:alias\\/[A-Za-z0-9/-_])|(?:key\\/(?:[0-9a-fA-F]{8}-(?:[0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12})))$", - "default": "{{ssm:/Solutions/SO0111/CMK_REMEDIATION_ARN}}", - "type": "String", - }, "RemediationRoleName": { "allowedPattern": "^[\\w+=,.@-]+$", - "default": "SO0111-EnableEncryptionForSQSQueue", + "default": "SO0111-UpdateSecretRotationPeriod", "type": "String", }, }, @@ -17568,7 +26846,7 @@ def parse_event(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-PCI_3.2.1_SQS.1", + "Name": "ASR-PCI_3.2.1_SecretsManager.4", "Tags": [ { "Key": "CdkGenerated", @@ -17584,7 +26862,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "11dc0ae816812ef227e9a7bd3a337d46b3b7c94d23b76ecdc435757c227797ff", + "DocumentPropertiesHash": "4b7174bf7df06204923aaa452028938af0579301c000b39adf06e7d0a02a5b78", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -17601,7 +26879,58 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "53f801b967c642ca21ddb9491ddf4a80c610bd2342d34f42ac499e3ce5cc710b", + "DocumentPropertiesHash": "f27c4667d9684d512d3228df8782764776bce67684ab8364452d66661d2557c1", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 1, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "CreateWait10": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "CreateWait9", + ], + "Properties": { + "CreateIntervalSeconds": 1, + "DeleteIntervalSeconds": 0, + "DocumentPropertiesHash": 
"78ebfcb4f18551ede91aacd052b4a9b867fd13acdb795129b881eed196002209", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 1, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "CreateWait11": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "CreateWait10", + ], + "Properties": { + "CreateIntervalSeconds": 1, + "DeleteIntervalSeconds": 0, + "DocumentPropertiesHash": "7b6ad0f91d18f25741e1adfc72c3ff19f80fcaada059ea30cb51372f47a0ef0d", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 1, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "CreateWait12": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "CreateWait11", + ], + "Properties": { + "CreateIntervalSeconds": 1, + "DeleteIntervalSeconds": 0, + "DocumentPropertiesHash": "4eac44434d44110b19dc927f8f49b0e582ceca786543c4d165423fe47764de88", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -17618,7 +26947,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "da23d6295b0210b134873b98ca2dd6ffd898bf5ed1f0ccc8bdeab41eedc8d8dd", + "DocumentPropertiesHash": "e8a667054e9639326c05d3f6728e058949ff173f45aae17eb6cddf3472231e55", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -17635,7 +26964,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "ac944d9f5dc7f804a638b1f1918c19340da97d041d10594f767afd4031ae66ca", + "DocumentPropertiesHash": "aa761551b20e345abd8a40d1aa8e6ae70e245202ec12dedb88e559f5d446015a", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -17652,7 +26981,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "8905483d919c2ed52514bf7a8d5d06edac171820c6e9df1d6be71b5cf084c078", + "DocumentPropertiesHash": 
"88e5bba4283d43c36a2ed973b6ac17e6ae5991b73067123b31c2a30ffccef2fe", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -17669,7 +26998,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "bfdb7dd6e31ec8a0a2ba5ccebd9d8ff7e6756d2ec4b7e1cb66d9442e99ab3477", + "DocumentPropertiesHash": "cb2ea03ca61a8e08aa75ab9570c6ab34acc56fc0c2e8fc9c11d8a888ef55bcee", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -17686,7 +27015,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "2bfbbfd46c1cc7f42d0c4620b15883e0191e3920428e4308f63110ba7f9dc9d3", + "DocumentPropertiesHash": "8905483d919c2ed52514bf7a8d5d06edac171820c6e9df1d6be71b5cf084c078", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -17703,7 +27032,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "25edbc9c0ed41cf2d1104405506acd9b2506e9d7b5d0c66e0a0821fbc18ec83e", + "DocumentPropertiesHash": "bfdb7dd6e31ec8a0a2ba5ccebd9d8ff7e6756d2ec4b7e1cb66d9442e99ab3477", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -17720,7 +27049,24 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "f625c716502cc319bfe6790f29613a6647ee44dac20deb98c0268a18e3131ad2", + "DocumentPropertiesHash": "2bfbbfd46c1cc7f42d0c4620b15883e0191e3920428e4308f63110ba7f9dc9d3", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 1, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "CreateWait9": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "CreateWait8", + ], + "Properties": { + "CreateIntervalSeconds": 1, + "DeleteIntervalSeconds": 0, + "DocumentPropertiesHash": "25edbc9c0ed41cf2d1104405506acd9b2506e9d7b5d0c66e0a0821fbc18ec83e", "ServiceToken": { 
"Ref": "WaitProviderServiceToken", }, @@ -17737,7 +27083,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "11dc0ae816812ef227e9a7bd3a337d46b3b7c94d23b76ecdc435757c227797ff", + "DocumentPropertiesHash": "4b7174bf7df06204923aaa452028938af0579301c000b39adf06e7d0a02a5b78", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -17755,7 +27101,61 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "53f801b967c642ca21ddb9491ddf4a80c610bd2342d34f42ac499e3ce5cc710b", + "DocumentPropertiesHash": "f27c4667d9684d512d3228df8782764776bce67684ab8364452d66661d2557c1", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 0, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "DeletWait10": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "DeletWait9", + "Gate10", + ], + "Properties": { + "CreateIntervalSeconds": 0, + "DeleteIntervalSeconds": 0.5, + "DocumentPropertiesHash": "78ebfcb4f18551ede91aacd052b4a9b867fd13acdb795129b881eed196002209", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 0, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "DeletWait11": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "DeletWait10", + "Gate11", + ], + "Properties": { + "CreateIntervalSeconds": 0, + "DeleteIntervalSeconds": 0.5, + "DocumentPropertiesHash": "7b6ad0f91d18f25741e1adfc72c3ff19f80fcaada059ea30cb51372f47a0ef0d", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 0, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "DeletWait12": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "DeletWait11", + "Gate12", + ], + "Properties": { + "CreateIntervalSeconds": 0, + "DeleteIntervalSeconds": 0.5, + "DocumentPropertiesHash": 
"4eac44434d44110b19dc927f8f49b0e582ceca786543c4d165423fe47764de88", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -17773,7 +27173,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "da23d6295b0210b134873b98ca2dd6ffd898bf5ed1f0ccc8bdeab41eedc8d8dd", + "DocumentPropertiesHash": "e8a667054e9639326c05d3f6728e058949ff173f45aae17eb6cddf3472231e55", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -17791,7 +27191,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "ac944d9f5dc7f804a638b1f1918c19340da97d041d10594f767afd4031ae66ca", + "DocumentPropertiesHash": "aa761551b20e345abd8a40d1aa8e6ae70e245202ec12dedb88e559f5d446015a", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -17809,7 +27209,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "8905483d919c2ed52514bf7a8d5d06edac171820c6e9df1d6be71b5cf084c078", + "DocumentPropertiesHash": "88e5bba4283d43c36a2ed973b6ac17e6ae5991b73067123b31c2a30ffccef2fe", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -17827,7 +27227,7 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "bfdb7dd6e31ec8a0a2ba5ccebd9d8ff7e6756d2ec4b7e1cb66d9442e99ab3477", + "DocumentPropertiesHash": "cb2ea03ca61a8e08aa75ab9570c6ab34acc56fc0c2e8fc9c11d8a888ef55bcee", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -17845,7 +27245,25 @@ def parse_event(event, _): "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "2bfbbfd46c1cc7f42d0c4620b15883e0191e3920428e4308f63110ba7f9dc9d3", + "DocumentPropertiesHash": "8905483d919c2ed52514bf7a8d5d06edac171820c6e9df1d6be71b5cf084c078", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + 
"UpdateIntervalSeconds": 0, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "DeletWait7": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "DeletWait6", + "Gate7", + ], + "Properties": { + "CreateIntervalSeconds": 0, + "DeleteIntervalSeconds": 0.5, + "DocumentPropertiesHash": "bfdb7dd6e31ec8a0a2ba5ccebd9d8ff7e6756d2ec4b7e1cb66d9442e99ab3477", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -17854,16 +27272,16 @@ def parse_event(event, _): "Type": "Custom::Wait", "UpdateReplacePolicy": "Delete", }, - "DeletWait7": { + "DeletWait8": { "DeletionPolicy": "Delete", "DependsOn": [ - "DeletWait6", - "Gate7", + "DeletWait7", + "Gate8", ], "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "25edbc9c0ed41cf2d1104405506acd9b2506e9d7b5d0c66e0a0821fbc18ec83e", + "DocumentPropertiesHash": "2bfbbfd46c1cc7f42d0c4620b15883e0191e3920428e4308f63110ba7f9dc9d3", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -17872,16 +27290,16 @@ def parse_event(event, _): "Type": "Custom::Wait", "UpdateReplacePolicy": "Delete", }, - "DeletWait8": { + "DeletWait9": { "DeletionPolicy": "Delete", "DependsOn": [ - "DeletWait7", - "Gate8", + "DeletWait8", + "Gate9", ], "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "f625c716502cc319bfe6790f29613a6647ee44dac20deb98c0268a18e3131ad2", + "DocumentPropertiesHash": "25edbc9c0ed41cf2d1104405506acd9b2506e9d7b5d0c66e0a0821fbc18ec83e", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -17910,6 +27328,24 @@ def parse_event(event, _): "", ], }, + "ControlRunbooksCloudFront116F66FF8Ready": { + "Fn::If": [ + "ControlRunbooksEnableCloudFront1ConditionD78B5553", + { + "Ref": "ControlRunbooksCloudFront116F66FF8", + }, + "", + ], + }, + "ControlRunbooksCloudFront1283E53E96Ready": { + "Fn::If": [ + "ControlRunbooksEnableCloudFront12Condition59835E00", + { + "Ref": "ControlRunbooksCloudFront1283E53E96", + }, + 
"", + ], + }, "ControlRunbooksCloudTrail1B15F1A13Ready": { "Fn::If": [ "ControlRunbooksEnableCloudTrail1ConditionB7EBAA86", @@ -17919,6 +27355,11 @@ def parse_event(event, _): "", ], }, + }, + "Type": "AWS::CloudFormation::WaitConditionHandle", + }, + "Gate1": { + "Metadata": { "ControlRunbooksCloudTrail2979D0B5DReady": { "Fn::If": [ "ControlRunbooksEnableCloudTrail2ConditionC182A10F", @@ -17937,11 +27378,6 @@ def parse_event(event, _): "", ], }, - }, - "Type": "AWS::CloudFormation::WaitConditionHandle", - }, - "Gate1": { - "Metadata": { "ControlRunbooksCloudTrail54F5ED8E4Ready": { "Fn::If": [ "ControlRunbooksEnableCloudTrail5Condition17B6B536", @@ -17969,6 +27405,125 @@ def parse_event(event, _): "", ], }, + }, + "Type": "AWS::CloudFormation::WaitConditionHandle", + }, + "Gate10": { + "Metadata": { + "ControlRunbooksS31114BF0AC9Ready": { + "Fn::If": [ + "ControlRunbooksEnableS311Condition6AA79443", + { + "Ref": "ControlRunbooksS31114BF0AC9", + }, + "", + ], + }, + "ControlRunbooksS313756F060BReady": { + "Fn::If": [ + "ControlRunbooksEnableS313ConditionA95162A4", + { + "Ref": "ControlRunbooksS313756F060B", + }, + "", + ], + }, + "ControlRunbooksS356959B795Ready": { + "Fn::If": [ + "ControlRunbooksEnableS35ConditionD5E024B6", + { + "Ref": "ControlRunbooksS356959B795", + }, + "", + ], + }, + "ControlRunbooksS360762680AReady": { + "Fn::If": [ + "ControlRunbooksEnableS36ConditionD22273E2", + { + "Ref": "ControlRunbooksS360762680A", + }, + "", + ], + }, + "ControlRunbooksSecretsManager13D89C735Ready": { + "Fn::If": [ + "ControlRunbooksEnableSecretsManager1ConditionCE635AAF", + { + "Ref": "ControlRunbooksSecretsManager13D89C735", + }, + "", + ], + }, + }, + "Type": "AWS::CloudFormation::WaitConditionHandle", + }, + "Gate11": { + "Metadata": { + "ControlRunbooksSNS145784CBBReady": { + "Fn::If": [ + "ControlRunbooksEnableSNS1Condition7720D1CC", + { + "Ref": "ControlRunbooksSNS145784CBB", + }, + "", + ], + }, + "ControlRunbooksSNS2112179CCReady": { + "Fn::If": [ + 
"ControlRunbooksEnableSNS2Condition69621468", + { + "Ref": "ControlRunbooksSNS2112179CC", + }, + "", + ], + }, + "ControlRunbooksSQS173AA7C81Ready": { + "Fn::If": [ + "ControlRunbooksEnableSQS1Condition3065B4F2", + { + "Ref": "ControlRunbooksSQS173AA7C81", + }, + "", + ], + }, + "ControlRunbooksSecretsManager313120200Ready": { + "Fn::If": [ + "ControlRunbooksEnableSecretsManager3Condition04E1FFBB", + { + "Ref": "ControlRunbooksSecretsManager313120200", + }, + "", + ], + }, + "ControlRunbooksSecretsManager42177BC07Ready": { + "Fn::If": [ + "ControlRunbooksEnableSecretsManager4ConditionCE71F44A", + { + "Ref": "ControlRunbooksSecretsManager42177BC07", + }, + "", + ], + }, + }, + "Type": "AWS::CloudFormation::WaitConditionHandle", + }, + "Gate12": { + "Metadata": { + "ControlRunbooksSSM442CDFB67Ready": { + "Fn::If": [ + "ControlRunbooksEnableSSM4ConditionD47FCFB5", + { + "Ref": "ControlRunbooksSSM442CDFB67", + }, + "", + ], + }, + }, + "Type": "AWS::CloudFormation::WaitConditionHandle", + }, + "Gate2": { + "Metadata": { "ControlRunbooksCloudWatch1A05F543AReady": { "Fn::If": [ "ControlRunbooksEnableCloudWatch1ConditionAB0DF2E5", @@ -17987,11 +27542,15 @@ def parse_event(event, _): "", ], }, - }, - "Type": "AWS::CloudFormation::WaitConditionHandle", - }, - "Gate2": { - "Metadata": { + "ControlRunbooksCodeBuild509682556Ready": { + "Fn::If": [ + "ControlRunbooksEnableCodeBuild5Condition5FF93A0A", + { + "Ref": "ControlRunbooksCodeBuild509682556", + }, + "", + ], + }, "ControlRunbooksConfig1512B566FReady": { "Fn::If": [ "ControlRunbooksEnableConfig1Condition8CEB8627", @@ -18010,6 +27569,11 @@ def parse_event(event, _): "", ], }, + }, + "Type": "AWS::CloudFormation::WaitConditionHandle", + }, + "Gate3": { + "Metadata": { "ControlRunbooksEC22ED852ADFReady": { "Fn::If": [ "ControlRunbooksEnableEC22ConditionB9E0D42E", @@ -18019,6 +27583,15 @@ def parse_event(event, _): "", ], }, + "ControlRunbooksEC247C182546Ready": { + "Fn::If": [ + "ControlRunbooksEnableEC24Condition72408A1B", 
+ { + "Ref": "ControlRunbooksEC247C182546", + }, + "", + ], + }, "ControlRunbooksEC267E3087AEReady": { "Fn::If": [ "ControlRunbooksEnableEC26ConditionF1F880B0", @@ -18037,10 +27610,19 @@ def parse_event(event, _): "", ], }, + "ControlRunbooksEC287C39A9F1Ready": { + "Fn::If": [ + "ControlRunbooksEnableEC28Condition4C4640B8", + { + "Ref": "ControlRunbooksEC287C39A9F1", + }, + "", + ], + }, }, "Type": "AWS::CloudFormation::WaitConditionHandle", }, - "Gate3": { + "Gate4": { "Metadata": { "ControlRunbooksEC213D7C9C1EBReady": { "Fn::If": [ @@ -18060,6 +27642,56 @@ def parse_event(event, _): "", ], }, + "ControlRunbooksEC218DB9589DDReady": { + "Fn::If": [ + "ControlRunbooksEnableEC218Condition903B1C90", + { + "Ref": "ControlRunbooksEC218DB9589DD", + }, + "", + ], + }, + "ControlRunbooksEC2197047C726Ready": { + "Fn::If": [ + "ControlRunbooksEnableEC219Condition2421DE99", + { + "Ref": "ControlRunbooksEC2197047C726", + }, + "", + ], + }, + "ControlRunbooksEC223EAFC5818Ready": { + "Fn::If": [ + "ControlRunbooksEnableEC223Condition795CB580", + { + "Ref": "ControlRunbooksEC223EAFC5818", + }, + "", + ], + }, + }, + "Type": "AWS::CloudFormation::WaitConditionHandle", + }, + "Gate5": { + "Metadata": { + "ControlRunbooksECR16DEF82C5Ready": { + "Fn::If": [ + "ControlRunbooksEnableECR1Condition70BCAF70", + { + "Ref": "ControlRunbooksECR16DEF82C5", + }, + "", + ], + }, + "ControlRunbooksGuardDuty15E0D2BEAReady": { + "Fn::If": [ + "ControlRunbooksEnableGuardDuty1Condition97849740", + { + "Ref": "ControlRunbooksGuardDuty15E0D2BEA", + }, + "", + ], + }, "ControlRunbooksIAM3DC25477EReady": { "Fn::If": [ "ControlRunbooksEnableIAM3Condition3AA0E892", @@ -18090,7 +27722,7 @@ def parse_event(event, _): }, "Type": "AWS::CloudFormation::WaitConditionHandle", }, - "Gate4": { + "Gate6": { "Metadata": { "ControlRunbooksIAM18ACE62321Ready": { "Fn::If": [ @@ -18140,7 +27772,7 @@ def parse_event(event, _): }, "Type": "AWS::CloudFormation::WaitConditionHandle", }, - "Gate5": { + "Gate7": { "Metadata": 
{ "ControlRunbooksRDS2FBE04686Ready": { "Fn::If": [ @@ -18190,7 +27822,7 @@ def parse_event(event, _): }, "Type": "AWS::CloudFormation::WaitConditionHandle", }, - "Gate6": { + "Gate8": { "Metadata": { "ControlRunbooksRDS13FCEA51BDReady": { "Fn::If": [ @@ -18240,7 +27872,7 @@ def parse_event(event, _): }, "Type": "AWS::CloudFormation::WaitConditionHandle", }, - "Gate7": { + "Gate9": { "Metadata": { "ControlRunbooksRedshift475A78168Ready": { "Fn::If": [ @@ -18290,56 +27922,6 @@ def parse_event(event, _): }, "Type": "AWS::CloudFormation::WaitConditionHandle", }, - "Gate8": { - "Metadata": { - "ControlRunbooksS356959B795Ready": { - "Fn::If": [ - "ControlRunbooksEnableS35ConditionD5E024B6", - { - "Ref": "ControlRunbooksS356959B795", - }, - "", - ], - }, - "ControlRunbooksS360762680AReady": { - "Fn::If": [ - "ControlRunbooksEnableS36ConditionD22273E2", - { - "Ref": "ControlRunbooksS360762680A", - }, - "", - ], - }, - "ControlRunbooksSNS145784CBBReady": { - "Fn::If": [ - "ControlRunbooksEnableSNS1Condition7720D1CC", - { - "Ref": "ControlRunbooksSNS145784CBB", - }, - "", - ], - }, - "ControlRunbooksSNS2112179CCReady": { - "Fn::If": [ - "ControlRunbooksEnableSNS2Condition69621468", - { - "Ref": "ControlRunbooksSNS2112179CC", - }, - "", - ], - }, - "ControlRunbooksSQS173AA7C81Ready": { - "Fn::If": [ - "ControlRunbooksEnableSQS1Condition3065B4F2", - { - "Ref": "ControlRunbooksSQS173AA7C81", - }, - "", - ], - }, - }, - "Type": "AWS::CloudFormation::WaitConditionHandle", - }, }, } `; diff --git a/source/playbooks/common/cloudwatch_get_input_values.py b/source/playbooks/common/cloudwatch_get_input_values.py index 1d6650ef..9e9287a1 100644 --- a/source/playbooks/common/cloudwatch_get_input_values.py +++ b/source/playbooks/common/cloudwatch_get_input_values.py @@ -2,203 +2,206 @@ # SPDX-License-Identifier: Apache-2.0 unauthorizedAPICallsFilter = { - "filter_name": "UnauthorizedAPICalls", - "filter_pattern": '{($.errorCode="*UnauthorizedOperation") || 
($.errorCode="AccessDenied*")}', - "metric_name": "UnauthorizedAPICalls", - "metric_value": 1, - "alarm_name": "UnauthorizedAPICalls", - "alarm_desc": "Alarm for UnauthorizedAPICalls > 0", - "alarm_threshold": 1 + "filter_name": "UnauthorizedAPICalls", + "filter_pattern": '{($.errorCode="*UnauthorizedOperation") || ($.errorCode="AccessDenied*")}', + "metric_name": "UnauthorizedAPICalls", + "metric_value": 1, + "alarm_name": "UnauthorizedAPICalls", + "alarm_desc": "Alarm for UnauthorizedAPICalls > 0", + "alarm_threshold": 1, } consoleSignInWithoutMFAFilter = { - "filter_name": "ConsoleSigninWithoutMFA", - "filter_pattern": '{($.eventName="ConsoleLogin") && ($.additionalEventData.MFAUsed !="Yes")}', - "metric_name": "ConsoleSigninWithoutMFA", - "metric_value": 1, - "alarm_name": "ConsoleSigninWithoutMFA", - "alarm_desc": "Alarm for ConsoleSigninWithoutMFA > 0", - "alarm_threshold": 1 - } + "filter_name": "ConsoleSigninWithoutMFA", + "filter_pattern": '{($.eventName="ConsoleLogin") && ($.additionalEventData.MFAUsed !="Yes")}', + "metric_name": "ConsoleSigninWithoutMFA", + "metric_value": 1, + "alarm_name": "ConsoleSigninWithoutMFA", + "alarm_desc": "Alarm for ConsoleSigninWithoutMFA > 0", + "alarm_threshold": 1, +} rootAccountUsageFilter = { - "filter_name": "RootAccountUsage", - "filter_pattern": '{$.userIdentity.type="Root" && $.userIdentity.invokedBy NOT EXISTS && $.eventType !="AwsServiceEvent"}', - "metric_name": "RootAccountUsage", - "metric_value": 1, - "alarm_name": "RootAccountUsage", - "alarm_desc": "Alarm for RootAccountUsage > 0", - "alarm_threshold": 1 + "filter_name": "RootAccountUsage", + "filter_pattern": '{$.userIdentity.type="Root" && $.userIdentity.invokedBy NOT EXISTS && $.eventType !="AwsServiceEvent"}', + "metric_name": "RootAccountUsage", + "metric_value": 1, + "alarm_name": "RootAccountUsage", + "alarm_desc": "Alarm for RootAccountUsage > 0", + "alarm_threshold": 1, } iamPolicyChangesFilter = { - "filter_name": "IAMPolicyChanges", - 
"filter_pattern": '{($.eventName=DeleteGroupPolicy) || ($.eventName=DeleteRolePolicy) || ($.eventName=DeleteUserPolicy) || ($.eventName=PutGroupPolicy) || ($.eventName=PutRolePolicy) || ($.eventName=PutUserPolicy) || ($.eventName=CreatePolicy) || ($.eventName=DeletePolicy) || ($.eventName=CreatePolicyVersion) || ($.eventName=DeletePolicyVersion) || ($.eventName=AttachRolePolicy) || ($.eventName=DetachRolePolicy) || ($.eventName=AttachUserPolicy) || ($.eventName=DetachUserPolicy) || ($.eventName=AttachGroupPolicy) || ($.eventName=DetachGroupPolicy)}', - "metric_name": "IAMPolicyChanges", - "metric_value": 1, - "alarm_name": "IAMPolicyChanges", - "alarm_desc": "Alarm for IAMPolicyChanges > 0", - "alarm_threshold": 1 - } + "filter_name": "IAMPolicyChanges", + "filter_pattern": "{($.eventName=DeleteGroupPolicy) || ($.eventName=DeleteRolePolicy) || ($.eventName=DeleteUserPolicy) || ($.eventName=PutGroupPolicy) || ($.eventName=PutRolePolicy) || ($.eventName=PutUserPolicy) || ($.eventName=CreatePolicy) || ($.eventName=DeletePolicy) || ($.eventName=CreatePolicyVersion) || ($.eventName=DeletePolicyVersion) || ($.eventName=AttachRolePolicy) || ($.eventName=DetachRolePolicy) || ($.eventName=AttachUserPolicy) || ($.eventName=DetachUserPolicy) || ($.eventName=AttachGroupPolicy) || ($.eventName=DetachGroupPolicy)}", + "metric_name": "IAMPolicyChanges", + "metric_value": 1, + "alarm_name": "IAMPolicyChanges", + "alarm_desc": "Alarm for IAMPolicyChanges > 0", + "alarm_threshold": 1, +} cloudtrailChangesFilter = { - "filter_name": "CloudTrailChanges", - "filter_pattern": '{($.eventName=CreateTrail) || ($.eventName=UpdateTrail) || ($.eventName=DeleteTrail) || ($.eventName=StartLogging) || ($.eventName=StopLogging)}', - "metric_name": "CloudTrailChanges", - "metric_value": 1, - "alarm_name": "CloudTrailChanges", - "alarm_desc": "Alarm for CloudTrailChanges > 0", - "alarm_threshold": 1 + "filter_name": "CloudTrailChanges", + "filter_pattern": "{($.eventName=CreateTrail) || 
($.eventName=UpdateTrail) || ($.eventName=DeleteTrail) || ($.eventName=StartLogging) || ($.eventName=StopLogging)}", + "metric_name": "CloudTrailChanges", + "metric_value": 1, + "alarm_name": "CloudTrailChanges", + "alarm_desc": "Alarm for CloudTrailChanges > 0", + "alarm_threshold": 1, } consoleAuthenticationFailureFilter = { - "filter_name": "ConsoleAuthenticationFailure", - "filter_pattern": '{($.eventName=ConsoleLogin) && ($.errorMessage="Failed authentication")}', - "metric_name": "ConsoleAuthenticationFailure", - "metric_value": 1, - "alarm_name": "ConsoleAuthenticationFailure", - "alarm_desc": "Alarm for ConsoleAuthenticationFailure > 0", - "alarm_threshold": 1 - } + "filter_name": "ConsoleAuthenticationFailure", + "filter_pattern": '{($.eventName=ConsoleLogin) && ($.errorMessage="Failed authentication")}', + "metric_name": "ConsoleAuthenticationFailure", + "metric_value": 1, + "alarm_name": "ConsoleAuthenticationFailure", + "alarm_desc": "Alarm for ConsoleAuthenticationFailure > 0", + "alarm_threshold": 1, +} disableOrDeleteCMKFilter = { - "filter_name": "DisableOrDeleteCMK", - "filter_pattern": '{($.eventSource=kms.amazonaws.com) && (($.eventName=DisableKey) || ($.eventName=ScheduleKeyDeletion))}', - "metric_name": "DisableOrDeleteCMK", - "metric_value": 1, - "alarm_name": "DisableOrDeleteCMK", - "alarm_desc": "Alarm for DisableOrDeleteCMK > 0", - "alarm_threshold": 1 + "filter_name": "DisableOrDeleteCMK", + "filter_pattern": "{($.eventSource=kms.amazonaws.com) && (($.eventName=DisableKey) || ($.eventName=ScheduleKeyDeletion))}", + "metric_name": "DisableOrDeleteCMK", + "metric_value": 1, + "alarm_name": "DisableOrDeleteCMK", + "alarm_desc": "Alarm for DisableOrDeleteCMK > 0", + "alarm_threshold": 1, } s3BucketPolicyChangesFilter = { - "filter_name": "S3BucketPolicyChanges", - "filter_pattern": '{($.eventSource=s3.amazonaws.com) && (($.eventName=PutBucketAcl) || ($.eventName=PutBucketPolicy) || ($.eventName=PutBucketCors) || 
($.eventName=PutBucketLifecycle) || ($.eventName=PutBucketReplication) || ($.eventName=DeleteBucketPolicy) || ($.eventName=DeleteBucketCors) || ($.eventName=DeleteBucketLifecycle) || ($.eventName=DeleteBucketReplication))}', - "metric_name": "S3BucketPolicyChanges", - "metric_value": 1, - "alarm_name": "S3BucketPolicyChanges", - "alarm_desc": "Alarm for S3BucketPolicyChanges > 0", - "alarm_threshold": 1 + "filter_name": "S3BucketPolicyChanges", + "filter_pattern": "{($.eventSource=s3.amazonaws.com) && (($.eventName=PutBucketAcl) || ($.eventName=PutBucketPolicy) || ($.eventName=PutBucketCors) || ($.eventName=PutBucketLifecycle) || ($.eventName=PutBucketReplication) || ($.eventName=DeleteBucketPolicy) || ($.eventName=DeleteBucketCors) || ($.eventName=DeleteBucketLifecycle) || ($.eventName=DeleteBucketReplication))}", + "metric_name": "S3BucketPolicyChanges", + "metric_value": 1, + "alarm_name": "S3BucketPolicyChanges", + "alarm_desc": "Alarm for S3BucketPolicyChanges > 0", + "alarm_threshold": 1, } awsConfigChangesFilter = { - "filter_name": "AWSConfigChanges", - "filter_pattern": '{($.eventSource=config.amazonaws.com) && (($.eventName=StopConfigurationRecorder) || ($.eventName=DeleteDeliveryChannel) || ($.eventName=PutDeliveryChannel) || ($.eventName=PutConfigurationRecorder))}', - "metric_name": "AWSConfigChanges", - "metric_value": 1, - "alarm_name": "AWSConfigChanges", - "alarm_desc": "Alarm for AWSConfigChanges > 0", - "alarm_threshold": 1 + "filter_name": "AWSConfigChanges", + "filter_pattern": "{($.eventSource=config.amazonaws.com) && (($.eventName=StopConfigurationRecorder) || ($.eventName=DeleteDeliveryChannel) || ($.eventName=PutDeliveryChannel) || ($.eventName=PutConfigurationRecorder))}", + "metric_name": "AWSConfigChanges", + "metric_value": 1, + "alarm_name": "AWSConfigChanges", + "alarm_desc": "Alarm for AWSConfigChanges > 0", + "alarm_threshold": 1, } securityGroupChangesFilter = { - "filter_name": "SecurityGroupChanges", - "filter_pattern": 
'{($.eventName=AuthorizeSecurityGroupIngress) || ($.eventName=AuthorizeSecurityGroupEgress) || ($.eventName=RevokeSecurityGroupIngress) || ($.eventName=RevokeSecurityGroupEgress) || ($.eventName=CreateSecurityGroup) || ($.eventName=DeleteSecurityGroup)}', - "metric_name": "SecurityGroupChanges", - "metric_value": 1, - "alarm_name": "SecurityGroupChanges", - "alarm_desc": "Alarm for SecurityGroupChanges > 0", - "alarm_threshold": 1 + "filter_name": "SecurityGroupChanges", + "filter_pattern": "{($.eventName=AuthorizeSecurityGroupIngress) || ($.eventName=AuthorizeSecurityGroupEgress) || ($.eventName=RevokeSecurityGroupIngress) || ($.eventName=RevokeSecurityGroupEgress) || ($.eventName=CreateSecurityGroup) || ($.eventName=DeleteSecurityGroup)}", + "metric_name": "SecurityGroupChanges", + "metric_value": 1, + "alarm_name": "SecurityGroupChanges", + "alarm_desc": "Alarm for SecurityGroupChanges > 0", + "alarm_threshold": 1, } networkACLChangesFilter = { - "filter_name": "NetworkACLChanges", - "filter_pattern": '{($.eventName=CreateNetworkAcl) || ($.eventName=CreateNetworkAclEntry) || ($.eventName=DeleteNetworkAcl) || ($.eventName=DeleteNetworkAclEntry) || ($.eventName=ReplaceNetworkAclEntry) || ($.eventName=ReplaceNetworkAclAssociation)}', - "metric_name": "NetworkACLChanges", - "metric_value": 1, - "alarm_name": "NetworkACLChanges", - "alarm_desc": "Alarm for NetworkACLChanges > 0", - "alarm_threshold": 1 + "filter_name": "NetworkACLChanges", + "filter_pattern": "{($.eventName=CreateNetworkAcl) || ($.eventName=CreateNetworkAclEntry) || ($.eventName=DeleteNetworkAcl) || ($.eventName=DeleteNetworkAclEntry) || ($.eventName=ReplaceNetworkAclEntry) || ($.eventName=ReplaceNetworkAclAssociation)}", + "metric_name": "NetworkACLChanges", + "metric_value": 1, + "alarm_name": "NetworkACLChanges", + "alarm_desc": "Alarm for NetworkACLChanges > 0", + "alarm_threshold": 1, } networkGatewayChangesFilter = { - "filter_name": "NetworkGatewayChanges", - "filter_pattern": 
'{($.eventName=CreateCustomerGateway) || ($.eventName=DeleteCustomerGateway) || ($.eventName=AttachInternetGateway) || ($.eventName=CreateInternetGateway) || ($.eventName=DeleteInternetGateway) || ($.eventName=DetachInternetGateway)}', - "metric_name": "NetworkGatewayChanges", - "metric_value": 1, - "alarm_name": "NetworkGatewayChanges", - "alarm_desc": "Alarm for NetworkGatewayChanges > 0", - "alarm_threshold": 1 + "filter_name": "NetworkGatewayChanges", + "filter_pattern": "{($.eventName=CreateCustomerGateway) || ($.eventName=DeleteCustomerGateway) || ($.eventName=AttachInternetGateway) || ($.eventName=CreateInternetGateway) || ($.eventName=DeleteInternetGateway) || ($.eventName=DetachInternetGateway)}", + "metric_name": "NetworkGatewayChanges", + "metric_value": 1, + "alarm_name": "NetworkGatewayChanges", + "alarm_desc": "Alarm for NetworkGatewayChanges > 0", + "alarm_threshold": 1, } routeTableChangesFilter = { - "filter_name": "RouteTableChanges", - "filter_pattern": '{($.eventName=CreateRoute) || ($.eventName=CreateRouteTable) || ($.eventName=ReplaceRoute) || ($.eventName=ReplaceRouteTableAssociation) || ($.eventName=DeleteRouteTable) || ($.eventName=DeleteRoute) || ($.eventName=DisassociateRouteTable)}', - "metric_name": "RouteTableChanges", - "metric_value": 1, - "alarm_name": "RouteTableChanges", - "alarm_desc": "Alarm for RouteTableChanges > 0", - "alarm_threshold": 1 + "filter_name": "RouteTableChanges", + "filter_pattern": "{($.eventName=CreateRoute) || ($.eventName=CreateRouteTable) || ($.eventName=ReplaceRoute) || ($.eventName=ReplaceRouteTableAssociation) || ($.eventName=DeleteRouteTable) || ($.eventName=DeleteRoute) || ($.eventName=DisassociateRouteTable)}", + "metric_name": "RouteTableChanges", + "metric_value": 1, + "alarm_name": "RouteTableChanges", + "alarm_desc": "Alarm for RouteTableChanges > 0", + "alarm_threshold": 1, } vpcChangesFilter = { - "filter_name": "VPCChanges", - "filter_pattern": '{($.eventName=CreateVpc) || 
($.eventName=DeleteVpc) || ($.eventName=ModifyVpcAttribute) || ($.eventName=AcceptVpcPeeringConnection) || ($.eventName=CreateVpcPeeringConnection) || ($.eventName=DeleteVpcPeeringConnection) || ($.eventName=RejectVpcPeeringConnection) || ($.eventName=AttachClassicLinkVpc) || ($.eventName=DetachClassicLinkVpc) || ($.eventName=DisableVpcClassicLink) || ($.eventName=EnableVpcClassicLink)}', - "metric_name": "VPCChanges", - "metric_value": 1, - "alarm_name": "VPCChanges", - "alarm_desc": "Alarm for VPCChanges > 0", - "alarm_threshold": 1 + "filter_name": "VPCChanges", + "filter_pattern": "{($.eventName=CreateVpc) || ($.eventName=DeleteVpc) || ($.eventName=ModifyVpcAttribute) || ($.eventName=AcceptVpcPeeringConnection) || ($.eventName=CreateVpcPeeringConnection) || ($.eventName=DeleteVpcPeeringConnection) || ($.eventName=RejectVpcPeeringConnection) || ($.eventName=AttachClassicLinkVpc) || ($.eventName=DetachClassicLinkVpc) || ($.eventName=DisableVpcClassicLink) || ($.eventName=EnableVpcClassicLink)}", + "metric_name": "VPCChanges", + "metric_value": 1, + "alarm_name": "VPCChanges", + "alarm_desc": "Alarm for VPCChanges > 0", + "alarm_threshold": 1, } Cloudwatch_mappings = { - 'cis-aws-foundations-benchmark': { - '1.2.0': { - '3.1': unauthorizedAPICallsFilter, - '3.2': consoleSignInWithoutMFAFilter, - '3.3': rootAccountUsageFilter, - '3.4': iamPolicyChangesFilter, - '3.5': cloudtrailChangesFilter, - '3.6': consoleAuthenticationFailureFilter, - '3.7': disableOrDeleteCMKFilter, - '3.8': s3BucketPolicyChangesFilter, - '3.9': awsConfigChangesFilter, - '3.10': securityGroupChangesFilter, - '3.11': networkACLChangesFilter, - '3.12': networkGatewayChangesFilter, - '3.13': routeTableChangesFilter, - '3.14': vpcChangesFilter + "cis-aws-foundations-benchmark": { + "1.2.0": { + "3.1": unauthorizedAPICallsFilter, + "3.2": consoleSignInWithoutMFAFilter, + "3.3": rootAccountUsageFilter, + "3.4": iamPolicyChangesFilter, + "3.5": cloudtrailChangesFilter, + "3.6": 
consoleAuthenticationFailureFilter, + "3.7": disableOrDeleteCMKFilter, + "3.8": s3BucketPolicyChangesFilter, + "3.9": awsConfigChangesFilter, + "3.10": securityGroupChangesFilter, + "3.11": networkACLChangesFilter, + "3.12": networkGatewayChangesFilter, + "3.13": routeTableChangesFilter, + "3.14": vpcChangesFilter, + }, + "1.4.0": { + "4.3": rootAccountUsageFilter, + "4.4": iamPolicyChangesFilter, + "4.5": cloudtrailChangesFilter, + "4.6": consoleAuthenticationFailureFilter, + "4.7": disableOrDeleteCMKFilter, + "4.8": s3BucketPolicyChangesFilter, + "4.9": awsConfigChangesFilter, + "4.10": securityGroupChangesFilter, + "4.11": networkACLChangesFilter, + "4.12": networkGatewayChangesFilter, + "4.13": routeTableChangesFilter, + "4.14": vpcChangesFilter, }, - '1.4.0': { - '4.3': rootAccountUsageFilter, - '4.4': iamPolicyChangesFilter, - '4.5': cloudtrailChangesFilter, - '4.6': consoleAuthenticationFailureFilter, - '4.7': disableOrDeleteCMKFilter, - '4.8': s3BucketPolicyChangesFilter, - '4.9': awsConfigChangesFilter, - '4.10': securityGroupChangesFilter, - '4.11': networkACLChangesFilter, - '4.12': networkGatewayChangesFilter, - '4.13': routeTableChangesFilter, - '4.14': vpcChangesFilter - } }, - 'security-control': { - '2.0.0': { - "CloudWatch.1": rootAccountUsageFilter, - "CloudWatch.2": unauthorizedAPICallsFilter, - "CloudWatch.3": consoleSignInWithoutMFAFilter, - "CloudWatch.4": iamPolicyChangesFilter, - "CloudWatch.5": cloudtrailChangesFilter, - "CloudWatch.6": consoleAuthenticationFailureFilter, - "CloudWatch.7": disableOrDeleteCMKFilter, - "CloudWatch.8": s3BucketPolicyChangesFilter, - "CloudWatch.9": awsConfigChangesFilter, - "CloudWatch.10": securityGroupChangesFilter, - "CloudWatch.11": networkACLChangesFilter, - "CloudWatch.12": networkGatewayChangesFilter, - "CloudWatch.13": routeTableChangesFilter, - "CloudWatch.14": vpcChangesFilter + "security-control": { + "2.0.0": { + "CloudWatch.1": rootAccountUsageFilter, + "CloudWatch.2": unauthorizedAPICallsFilter, 
+ "CloudWatch.3": consoleSignInWithoutMFAFilter, + "CloudWatch.4": iamPolicyChangesFilter, + "CloudWatch.5": cloudtrailChangesFilter, + "CloudWatch.6": consoleAuthenticationFailureFilter, + "CloudWatch.7": disableOrDeleteCMKFilter, + "CloudWatch.8": s3BucketPolicyChangesFilter, + "CloudWatch.9": awsConfigChangesFilter, + "CloudWatch.10": securityGroupChangesFilter, + "CloudWatch.11": networkACLChangesFilter, + "CloudWatch.12": networkGatewayChangesFilter, + "CloudWatch.13": routeTableChangesFilter, + "CloudWatch.14": vpcChangesFilter, } - } + }, } + def verify(event, _): try: - standard_mapping = Cloudwatch_mappings.get(event['StandardLongName']).get(event['StandardVersion']) - return standard_mapping.get(event['ControlId'], None) + standard_mapping = Cloudwatch_mappings[event["StandardLongName"]][ + event["StandardVersion"] + ] + return standard_mapping.get(event["ControlId"], None) except KeyError as ex: - exit(f'ERROR: Could not find associated metric filter. Missing parameter: {str(ex)}') - - \ No newline at end of file + exit( + f"ERROR: Could not find associated metric filter. Missing parameter: {str(ex)}" + ) diff --git a/source/playbooks/common/deserialize_json.py b/source/playbooks/common/deserialize_json.py index d01d83d5..62e3a56d 100644 --- a/source/playbooks/common/deserialize_json.py +++ b/source/playbooks/common/deserialize_json.py @@ -2,9 +2,10 @@ # SPDX-License-Identifier: Apache-2.0 import json + def event_handler(event, _): try: - return json.loads(event['SerializedJson']) + return json.loads(event["SerializedJson"]) except Exception as e: print(e) - exit('Failed to deserialize data') + exit("Failed to deserialize data") diff --git a/source/playbooks/common/get_input_params.py b/source/playbooks/common/get_input_params.py new file mode 100644 index 00000000..5cc436d1 --- /dev/null +++ b/source/playbooks/common/get_input_params.py @@ -0,0 +1,36 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import re + + +def parse_non_string_types(param): + if re.match("^\d+$", str(param)): + param = int(param) + return param + if param == "true" or param == "True": + return True + if param == "false" or param == "False": + return False + if isinstance(param, list): + return param + if len(param.split(",")) > 1: + return param.split(",") + return param + + +def get_input_params(event, _): + security_hub_input_params = event["SecHubInputParams"] + + default_params = event["DefaultParams"] + + input_params = {} + + for param in default_params: + if param in security_hub_input_params: + converted_param = parse_non_string_types(security_hub_input_params[param]) + input_params[param] = converted_param + else: + converted_param = parse_non_string_types(default_params[param]) + input_params[param] = converted_param + + return input_params diff --git a/source/playbooks/common/parse_input.py b/source/playbooks/common/parse_input.py index 65cf873b..7ca7d4a7 100644 --- a/source/playbooks/common/parse_input.py +++ b/source/playbooks/common/parse_input.py @@ -1,105 +1,108 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import re import json +import re +from typing import Any + import boto3 from botocore.config import Config + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def connect_to_ssm(boto_config): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) + def get_solution_id(): - return 'SO0111' + return "SO0111" + def get_solution_version(): ssm = connect_to_ssm( Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/unknown' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/unknown", ) ) - solution_version = 'unknown' + solution_version = "unknown" try: ssm_parm_value = ssm.get_parameter( - Name=f'/Solutions/{get_solution_id()}/member-version' - )['Parameter'].get('Value', 'unknown') + Name=f"/Solutions/{get_solution_id()}/member-version" + )["Parameter"].get("Value", "unknown") solution_version = ssm_parm_value except Exception as e: print(e) - print(f'ERROR getting solution version') + print("ERROR getting solution version") return solution_version + def get_shortname(long_name): short_name = { - 'aws-foundational-security-best-practices': 'AFSBP', - 'cis-aws-foundations-benchmark': 'CIS', - 'pci-dss': 'PCI', - 'security-control': 'SC' + "aws-foundational-security-best-practices": "AFSBP", + "cis-aws-foundations-benchmark": "CIS", + "pci-dss": "PCI", + "security-control": "SC", } return short_name.get(long_name, None) + def get_config_rule(rule_name): boto_config = Config( - retries = { - 'mode': 'standard' - }, - user_agent_extra = f'AwsSolution/{get_solution_id()}/{get_solution_version()}' + retries={"mode": "standard"}, + user_agent_extra=f"AwsSolution/{get_solution_id()}/{get_solution_version()}", ) config_rule = None try: configsvc = connect_to_config(boto_config) - config_rule = 
configsvc.describe_config_rules( - ConfigRuleNames=[ rule_name ] - ).get('ConfigRules', [])[0] + config_rule = configsvc.describe_config_rules(ConfigRuleNames=[rule_name]).get( + "ConfigRules", [] + )[0] except Exception as e: print(e) - exit(f'ERROR getting config rule {rule_name}') + exit(f"ERROR getting config rule {rule_name}") return config_rule + class FindingEvent: """ Finding object returns the parse fields from an input finding json object """ + def _get_resource_id(self, parse_id_pattern, resource_index): - identifier_raw = self.finding_json['Resources'][0]['Id'] + identifier_raw = self.finding_json["Resources"][0]["Id"] self.resource_id = identifier_raw self.resource_id_matches = [] if parse_id_pattern: - identifier_match = re.match( - parse_id_pattern, - identifier_raw - ) + identifier_match = re.match(parse_id_pattern, identifier_raw) if identifier_match: - for group in range(1, len(identifier_match.groups())+1): + for group in range(1, len(identifier_match.groups()) + 1): self.resource_id_matches.append(identifier_match.group(group)) self.resource_id = identifier_match.group(resource_index) else: - exit(f'ERROR: Invalid resource Id {identifier_raw}') - + exit(f"ERROR: Invalid resource Id {identifier_raw}") + def _get_sc_check(self): match_finding_id = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:'+ - 'security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:" + + "security-control/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: - self.standard_id = get_shortname('security-control') + self.standard_id = get_shortname("security-control") self.control_id = match_finding_id.group(1) return match_finding_id def _get_standard_info(self): match_finding_id = re.match( - 
r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:'+ - 'subscription/(.*?)/v/(\d+\.\d+\.\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$', - self.finding_json['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:" + + "subscription/(.*?)/v/(\d+\.\d+\.\d+)/(.*)/finding/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})$", + self.finding_json["Id"], ) if match_finding_id: self.standard_id = get_shortname(match_finding_id.group(1)) @@ -109,99 +112,140 @@ def _get_standard_info(self): match_sc_finding_id = self._get_sc_check() if not match_sc_finding_id: self.valid_finding = False - self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]}' + self.invalid_finding_reason = ( + f'Finding Id is invalid: {self.finding_json["Id"]}' + ) def _get_aws_config_rule(self): # config_rule_id refers to the AWS Config Rule that produced the finding - if "RelatedAWSResources:0/type" in self.finding_json['ProductFields'] and self.finding_json['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule': - self.aws_config_rule_id = self.finding_json['ProductFields']['RelatedAWSResources:0/name'] + if ( + "RelatedAWSResources:0/type" in self.finding_json["ProductFields"] + and self.finding_json["ProductFields"]["RelatedAWSResources:0/type"] + == "AWS::Config::ConfigRule" + ): + self.aws_config_rule_id = self.finding_json["ProductFields"][ + "RelatedAWSResources:0/name" + ] self.aws_config_rule = get_config_rule(self.aws_config_rule_id) def _get_region_from_resource_id(self): check_for_region = re.match( - r'^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\d):.*:.*$', - self.finding_json['Resources'][0]['Id'] + r"^arn:(?:aws|aws-cn|aws-us-gov):[a-zA-Z0-9]+:([a-z]{2}(?:-gov)?-[a-z]+-\d):.*:.*$", + self.finding_json["Resources"][0]["Id"], ) if check_for_region: self.resource_region = check_for_region.group(1) - def __init__(self, finding_json, 
parse_id_pattern, expected_control_id, resource_index): + def __init__( + self, finding_json, parse_id_pattern, expected_control_id, resource_index + ): self.valid_finding = True self.resource_region = None self.control_id = None self.aws_config_rule_id = None self.aws_config_rule = {} + self.input_params = {} """Populate fields""" # v1.5 - self.finding_json = finding_json - self._get_resource_id(parse_id_pattern, resource_index) # self.resource_id, self.resource_id_matches - self._get_standard_info() # self.standard_id, self.standard_version, self.control_id + self.finding_json: Any = finding_json + self._get_resource_id( + parse_id_pattern, resource_index + ) # self.resource_id, self.resource_id_matches + self._get_standard_info() # self.standard_id, self.standard_version, self.control_id # V1.4 - self.account_id = self.finding_json.get('AwsAccountId', None) # deprecate - get Finding.AwsAccountId - if not re.match(r'^\d{12}$', self.account_id) and self.valid_finding: + self.account_id = self.finding_json.get( + "AwsAccountId", None + ) # deprecate - get Finding.AwsAccountId + if not re.match(r"^\d{12}$", self.account_id) and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'AwsAccountId is invalid: {self.account_id}' - self.finding_id = self.finding_json.get('Id', None) # deprecate - self.product_arn = self.finding_json.get('ProductArn', None) - if not re.match(r'^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\d::product/aws/securityhub$', self.product_arn): + self.invalid_finding_reason = f"AwsAccountId is invalid: {self.account_id}" + self.finding_id = self.finding_json.get("Id", None) # deprecate + self.product_arn = self.finding_json.get("ProductArn", None) + if not re.match( + r"^arn:(?:aws|aws-cn|aws-us-gov):securityhub:[a-z]{2}(?:-gov)?-[a-z]+-\d::product/aws/securityhub$", + self.product_arn, + ): if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'ProductArn is invalid: 
{self.product_arn}' - self.details = self.finding_json['Resources'][0].get('Details', {}) + self.invalid_finding_reason = ( + f"ProductArn is invalid: {self.product_arn}" + ) + self.details = self.finding_json["Resources"][0].get("Details", {}) # Test mode is used with fabricated finding data to tell the # remediation runbook to run in test more (where supported) # Currently not widely-used and perhaps should be deprecated. - self.testmode = bool('testmode' in self.finding_json) - self.resource = self.finding_json['Resources'][0] + self.testmode = bool("testmode" in self.finding_json) + self.resource = self.finding_json["Resources"][0] self._get_region_from_resource_id() self._get_aws_config_rule() - self.affected_object = {'Type': self.resource['Type'], 'Id': self.resource_id, 'OutputKey': 'Remediation.Output'} + + if "InputParameters" in self.aws_config_rule: + self.input_params = json.loads(self.aws_config_rule["InputParameters"]) + + self.affected_object = { + "Type": self.resource["Type"], + "Id": self.resource_id, + "OutputKey": "Remediation.Output", + } # Validate control_id if not self.control_id: if self.valid_finding: self.valid_finding = False self.invalid_finding_reason = f'Finding Id is invalid: {self.finding_json["Id"]} - missing Control Id' - elif self.control_id not in expected_control_id: # ControlId is the expected value + elif ( + self.control_id not in expected_control_id + ): # ControlId is the expected value if self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = f'Control Id from input ({self.control_id}) does not match {str(expected_control_id)}' + self.invalid_finding_reason = f"Control Id from input ({self.control_id}) does not match {str(expected_control_id)}" if not self.resource_id and self.valid_finding: self.valid_finding = False - self.invalid_finding_reason = 'Resource Id is missing from the finding json Resources (Id)' + self.invalid_finding_reason = ( + "Resource Id is missing from the finding json 
Resources (Id)" + ) if not self.valid_finding: # Error message and return error data - msg = f'ERROR: {self.invalid_finding_reason}' + msg = f"ERROR: {self.invalid_finding_reason}" exit(msg) def __str__(self): return json.dumps(self.__dict__) -''' + +""" MAIN -''' +""" + + def parse_event(event, _): - finding_event = FindingEvent(event['Finding'], event['parse_id_pattern'], event['expected_control_id'], event.get('resource_index', 1)) + finding_event = FindingEvent( + event["Finding"], + event["parse_id_pattern"], + event["expected_control_id"], + event.get("resource_index", 1), + ) if not finding_event.valid_finding: - exit('ERROR: Finding is not valid') + exit("ERROR: Finding is not valid") return { "account_id": finding_event.account_id, "resource_id": finding_event.resource_id, - "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ + "finding_id": finding_event.finding_id, # Deprecate v1.5.0+ "control_id": finding_event.control_id, - "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ + "product_arn": finding_event.product_arn, # Deprecate v1.5.0+ "object": finding_event.affected_object, "matches": finding_event.resource_id_matches, - "details": finding_event.details, # Deprecate v1.5.0+ - "testmode": finding_event.testmode, # Deprecate v1.5.0+ + "details": finding_event.details, # Deprecate v1.5.0+ + "testmode": finding_event.testmode, # Deprecate v1.5.0+ "resource": finding_event.resource, "resource_region": finding_event.resource_region, "finding": finding_event.finding_json, - "aws_config_rule": finding_event.aws_config_rule + "aws_config_rule": finding_event.aws_config_rule, + "input_params": finding_event.input_params, } diff --git a/source/playbooks/common/test/conftest.py b/source/playbooks/common/test/conftest.py index 3ea5b014..08432a5a 100644 --- a/source/playbooks/common/test/conftest.py +++ b/source/playbooks/common/test/conftest.py @@ -1,8 +1,10 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import os + import pytest + @pytest.fixture(scope="module", autouse=True) def aws_credentials(): os.environ["AWS_ACCESS_KEY_ID"] = "testing" diff --git a/source/playbooks/common/test/test_afsbp_parse.py b/source/playbooks/common/test/test_afsbp_parse.py index 527bb7f5..df8287d6 100644 --- a/source/playbooks/common/test/test_afsbp_parse.py +++ b/source/playbooks/common/test/test_afsbp_parse.py @@ -1,20 +1,17 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import boto3 -import json import botocore.session -from botocore.stub import Stubber -from botocore.config import Config import pytest -from pytest_mock import mocker - +from botocore.config import Config +from botocore.stub import Stubber from parse_input import parse_event + def event(): return { - 'expected_control_id': 'AutoScaling.1', - 'parse_id_pattern': '^arn:(?:aws|aws-cn|aws-us-gov):autoscaling:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:autoScalingGroup:(?i:[0-9a-f]{11}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}):autoScalingGroupName/(.*)$', - 'Finding': { + "expected_control_id": "AutoScaling.1", + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):autoscaling:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:autoScalingGroup:(?i:[0-9a-f]{11}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}):autoScalingGroupName/(.*)$", + "Finding": { "SchemaVersion": "2018-10-08", "Id": "arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0/AutoScaling.1/finding/635ceb5d-3dfd-4458-804e-48a42cd723e4", "ProductArn": "arn:aws:securityhub:us-east-1::product/aws/securityhub", @@ -31,14 +28,14 @@ def event(): "Product": 0, "Label": "INFORMATIONAL", "Normalized": 0, - "Original": "INFORMATIONAL" + "Original": "INFORMATIONAL", }, "Title": "AutoScaling.1 Auto scaling groups associated with a load balancer should use load balancer health checks", "Description": "This control checks whether your Auto Scaling groups 
that are associated with a load balancer are using Elastic Load Balancing health checks.", "Remediation": { "Recommendation": { "Text": "For directions on how to fix this issue, please consult the AWS Security Hub Foundational Security Best Practices documentation.", - "Url": "https://docs.aws.amazon.com/console/securityhub/AutoScaling.1/remediation" + "Url": "https://docs.aws.amazon.com/console/securityhub/AutoScaling.1/remediation", } }, "ProductFields": { @@ -52,14 +49,14 @@ def event(): "aws/securityhub/ProductName": "Security Hub", "aws/securityhub/CompanyName": "AWS", "aws/securityhub/annotation": "AWS Config evaluated your resources against the rule. The rule did not apply to the AWS resources in its scope, the specified resources were deleted, or the evaluation results were deleted.", - "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-1::product/aws/securityhub/arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0/AutoScaling.1/finding/635ceb5d-3dfd-4458-804e-48a42cd723e4" + "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-1::product/aws/securityhub/arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0/AutoScaling.1/finding/635ceb5d-3dfd-4458-804e-48a42cd723e4", }, "Resources": [ { "Type": "AwsAccount", "Id": "arn:aws:autoscaling:us-east-2:111111111111:autoScalingGroup:785df3481e1-cd66-435d-96de-d6ed5416defd:autoScalingGroupName/sharr-test-autoscaling-1", "Partition": "aws", - "Region": "us-east-1" + "Region": "us-east-1", } ], "Compliance": { @@ -67,54 +64,51 @@ def event(): "StatusReasons": [ { "ReasonCode": "CONFIG_EVALUATIONS_EMPTY", - "Description": "AWS Config evaluated your resources against the rule. The rule did not apply to the AWS resources in its scope, the specified resources were deleted, or the evaluation results were deleted." + "Description": "AWS Config evaluated your resources against the rule. 
The rule did not apply to the AWS resources in its scope, the specified resources were deleted, or the evaluation results were deleted.", } - ] + ], }, "WorkflowState": "NEW", - "Workflow": { - "Status": "NEW" - }, - "RecordState": "ACTIVE" - } + "Workflow": {"Status": "NEW"}, + "RecordState": "ACTIVE", + }, } + def expected(): return { - "account_id": '111111111111', - "resource_id": 'sharr-test-autoscaling-1', - 'control_id': 'AutoScaling.1', - 'testmode': False, - "finding_id": 'arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0/AutoScaling.1/finding/635ceb5d-3dfd-4458-804e-48a42cd723e4', - "product_arn": 'arn:aws:securityhub:us-east-1::product/aws/securityhub', + "account_id": "111111111111", + "resource_id": "sharr-test-autoscaling-1", + "control_id": "AutoScaling.1", + "testmode": False, + "finding_id": "arn:aws:securityhub:us-east-1:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0/AutoScaling.1/finding/635ceb5d-3dfd-4458-804e-48a42cd723e4", + "product_arn": "arn:aws:securityhub:us-east-1::product/aws/securityhub", "object": { - "Type": 'AwsAccount', - "Id": 'sharr-test-autoscaling-1', - "OutputKey": 'Remediation.Output' + "Type": "AwsAccount", + "Id": "sharr-test-autoscaling-1", + "OutputKey": "Remediation.Output", }, - "matches": [ "sharr-test-autoscaling-1" ], - 'details': {}, - 'resource': event().get('Finding').get('Resources')[0], - 'resource_region': 'us-east-2', - 'aws_config_rule': { + "matches": ["sharr-test-autoscaling-1"], + "details": {}, + "resource": event().get("Finding").get("Resources")[0], + "resource_region": "us-east-2", + "aws_config_rule": { "ConfigRuleName": "s3-bucket-server-side-encryption-enabled", "ConfigRuleArn": "arn:aws:config:us-east-1:111111111111:config-rule/config-rule-vye3dl", "ConfigRuleId": "config-rule-vye3dl", "Description": "Checks whether the S3 bucket policy denies the put-object requests that are not encrypted using AES-256 or AWS KMS.", 
- "Scope": { - "ComplianceResourceTypes": [ - "AWS::S3::Bucket" - ] - }, + "Scope": {"ComplianceResourceTypes": ["AWS::S3::Bucket"]}, "Source": { "Owner": "AWS", - "SourceIdentifier": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED" + "SourceIdentifier": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED", }, "InputParameters": "{}", - "ConfigRuleState": "ACTIVE" - } + "ConfigRuleState": "ACTIVE", + }, + "input_params": {}, } + def config_rule(): return { "ConfigRules": [ @@ -123,143 +117,166 @@ def config_rule(): "ConfigRuleArn": "arn:aws:config:us-east-1:111111111111:config-rule/config-rule-vye3dl", "ConfigRuleId": "config-rule-vye3dl", "Description": "Checks whether the S3 bucket policy denies the put-object requests that are not encrypted using AES-256 or AWS KMS.", - "Scope": { - "ComplianceResourceTypes": [ - "AWS::S3::Bucket" - ] - }, + "Scope": {"ComplianceResourceTypes": ["AWS::S3::Bucket"]}, "Source": { "Owner": "AWS", - "SourceIdentifier": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED" + "SourceIdentifier": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED", }, "InputParameters": "{}", - "ConfigRuleState": "ACTIVE" + "ConfigRuleState": "ACTIVE", } ] } + def ssm_parm(): return { - 'Parameter': { - 'Name': 'Solutions/SO0111/member_version', - 'Type': 'String', - 'Value': 'v1.5.0' + "Parameter": { + "Name": "Solutions/SO0111/member_version", + "Type": "String", + "Value": "v1.5.0", } } -BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - } -) + + +BOTO_CONFIG = Config(retries={"mode": "standard"}) + @pytest.fixture(autouse=True) def run_before_and_after_tests(mocker): - cfg_client = botocore.session.get_session().create_client('config', config=BOTO_CONFIG) - cfg_stubber = Stubber(cfg_client) - cfg_stubber.add_response( - 'describe_config_rules', - config_rule() + cfg_client = botocore.session.get_session().create_client( + "config", config=BOTO_CONFIG ) + cfg_stubber = Stubber(cfg_client) + cfg_stubber.add_response("describe_config_rules", config_rule()) cfg_stubber.activate() 
- mocker.patch('parse_input.connect_to_config', return_value=cfg_client) + mocker.patch("parse_input.connect_to_config", return_value=cfg_client) - ssm_client = botocore.session.get_session().create_client('ssm', config=BOTO_CONFIG) + ssm_client = botocore.session.get_session().create_client("ssm", config=BOTO_CONFIG) ssm_stubber = Stubber(ssm_client) - ssm_stubber.add_response( - 'get_parameter', - ssm_parm() - ) + ssm_stubber.add_response("get_parameter", ssm_parm()) ssm_stubber.activate() - mocker.patch('parse_input.connect_to_ssm', return_value=ssm_client) + mocker.patch("parse_input.connect_to_ssm", return_value=ssm_client) yield cfg_stubber.deactivate() ssm_stubber.deactivate() + def test_parse_event(mocker): expected_result = expected() - expected_result['finding'] = event().get('Finding') + expected_result["finding"] = event().get("Finding") parsed_event = parse_event(event(), {}) assert parsed_event == expected_result + def test_parse_event_multimatch(mocker): expected_result = expected() - expected_result['finding'] = event().get('Finding') - expected_result['matches'] = [ - "us-east-2", - "sharr-test-autoscaling-1" - ] + expected_result["finding"] = event().get("Finding") + expected_result["matches"] = ["us-east-2", "sharr-test-autoscaling-1"] test_event = event() - test_event['resource_index'] = 2 - test_event['parse_id_pattern'] = r'^arn:(?:aws|aws-cn|aws-us-gov):autoscaling:((?:[a-z]{2}(?:-gov)?-[a-z]+-\d)):\d{12}:autoScalingGroup:(?i:[0-9a-f]{11}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}):autoScalingGroupName/(.*)$' + test_event["resource_index"] = 2 + test_event["parse_id_pattern"] = ( + r"^arn:(?:aws|aws-cn|aws-us-gov):autoscaling:((?:[a-z]{2}(?:-gov)?-[a-z]+-\d)):\d{12}:autoScalingGroup:(?i:[0-9a-f]{11}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}):autoScalingGroupName/(.*)$" + ) parsed_event = parse_event(test_event, {}) assert parsed_event == expected_result + def test_bad_finding_id(mocker): test_event = event() - test_event['Finding']['Id'] = "badvalue" + 
test_event["Finding"]["Id"] = "badvalue" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Finding Id is invalid: badvalue' + assert pytest_wrapped_e.value.code == "ERROR: Finding Id is invalid: badvalue" + def test_bad_control_id(mocker): test_event = event() - test_event['Finding']['Id'] = "arn:aws:securityhub:us-east-2:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0//finding/635ceb5d-3dfd-4458-804e-48a42cd723e4" + test_event["Finding"][ + "Id" + ] = "arn:aws:securityhub:us-east-2:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0//finding/635ceb5d-3dfd-4458-804e-48a42cd723e4" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Finding Id is invalid: arn:aws:securityhub:us-east-2:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0//finding/635ceb5d-3dfd-4458-804e-48a42cd723e4 - missing Control Id' + assert ( + pytest_wrapped_e.value.code + == "ERROR: Finding Id is invalid: arn:aws:securityhub:us-east-2:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0//finding/635ceb5d-3dfd-4458-804e-48a42cd723e4 - missing Control Id" + ) + def test_control_id_nomatch(mocker): test_event = event() - test_event['Finding']['Id'] = "arn:aws:securityhub:us-east-2:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0/EC2.1/finding/635ceb5d-3dfd-4458-804e-48a42cd723e4" + test_event["Finding"][ + "Id" + ] = "arn:aws:securityhub:us-east-2:111111111111:subscription/aws-foundational-security-best-practices/v/1.0.0/EC2.1/finding/635ceb5d-3dfd-4458-804e-48a42cd723e4" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = 
parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Control Id from input (EC2.1) does not match AutoScaling.1' + assert ( + pytest_wrapped_e.value.code + == "ERROR: Control Id from input (EC2.1) does not match AutoScaling.1" + ) + def test_bad_account_id(mocker): test_event = event() - test_event['Finding']['AwsAccountId'] = "1234123412345" + test_event["Finding"]["AwsAccountId"] = "1234123412345" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: AwsAccountId is invalid: 1234123412345' + assert ( + pytest_wrapped_e.value.code == "ERROR: AwsAccountId is invalid: 1234123412345" + ) + def test_bad_productarn(mocker): test_event = event() - test_event['Finding']['ProductArn'] = "badvalue" + test_event["Finding"]["ProductArn"] = "badvalue" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: ProductArn is invalid: badvalue' + assert pytest_wrapped_e.value.code == "ERROR: ProductArn is invalid: badvalue" + def test_bad_resource_match(mocker): test_event = event() - test_event['parse_id_pattern'] = '^arn:(?:aws|aws-cn|aws-us-gov):logs:::([A-Za-z0-9.-]{3,63})$' + test_event["parse_id_pattern"] = ( + "^arn:(?:aws|aws-cn|aws-us-gov):logs:::([A-Za-z0-9.-]{3,63})$" + ) with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Invalid resource Id arn:aws:autoscaling:us-east-2:111111111111:autoScalingGroup:785df3481e1-cd66-435d-96de-d6ed5416defd:autoScalingGroupName/sharr-test-autoscaling-1' + 
assert ( + pytest_wrapped_e.value.code + == "ERROR: Invalid resource Id arn:aws:autoscaling:us-east-2:111111111111:autoScalingGroup:785df3481e1-cd66-435d-96de-d6ed5416defd:autoScalingGroupName/sharr-test-autoscaling-1" + ) + def test_no_resource_pattern(mocker): test_event = event() expected_result = expected() - expected_result['finding'] = event().get('Finding') - test_event['parse_id_pattern'] = '' - expected_result['resource_id'] = 'arn:aws:autoscaling:us-east-2:111111111111:autoScalingGroup:785df3481e1-cd66-435d-96de-d6ed5416defd:autoScalingGroupName/sharr-test-autoscaling-1' - expected_result['matches'] = [] - expected_result['object']['Id'] = expected_result['resource_id'] + expected_result["finding"] = event().get("Finding") + test_event["parse_id_pattern"] = "" + expected_result["resource_id"] = ( + "arn:aws:autoscaling:us-east-2:111111111111:autoScalingGroup:785df3481e1-cd66-435d-96de-d6ed5416defd:autoScalingGroupName/sharr-test-autoscaling-1" + ) + expected_result["matches"] = [] + expected_result["object"]["Id"] = expected_result["resource_id"] parsed_event = parse_event(test_event, {}) assert parsed_event == expected_result + def test_no_resource_pattern_no_resource_id(mocker): test_event = event() - test_event['parse_id_pattern'] = '' - test_event['Finding']['Resources'][0]['Id'] = '' + test_event["parse_id_pattern"] = "" + test_event["Finding"]["Resources"][0]["Id"] = "" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Resource Id is missing from the finding json Resources (Id)' + assert ( + pytest_wrapped_e.value.code + == "ERROR: Resource Id is missing from the finding json Resources (Id)" + ) diff --git a/source/playbooks/common/test/test_cis120_parse.py b/source/playbooks/common/test/test_cis120_parse.py index 6070b4e8..fe6f067c 100644 --- 
a/source/playbooks/common/test/test_cis120_parse.py +++ b/source/playbooks/common/test/test_cis120_parse.py @@ -1,20 +1,17 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import boto3 -import json import botocore.session -from botocore.stub import Stubber -from botocore.config import Config import pytest -from pytest_mock import mocker - +from botocore.config import Config +from botocore.stub import Stubber from parse_input import parse_event + def event(): return { - 'expected_control_id': '2.3', - 'parse_id_pattern': '^arn:(?:aws|aws-cn|aws-us-gov):s3:::([A-Za-z0-9.-]{3,63})$', - 'Finding': { + "expected_control_id": "2.3", + "parse_id_pattern": "^arn:(?:aws|aws-cn|aws-us-gov):s3:::([A-Za-z0-9.-]{3,63})$", + "Finding": { "ProductArn": "arn:aws:securityhub:us-east-2::product/aws/securityhub", "Types": [ "Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" @@ -26,24 +23,22 @@ def event(): "StatusReasons": [ { "Description": "The finding is in a WARNING state, because the S3 Bucket associated with this rule is in a different region/account. 
This rule does not support cross-region/cross-account checks, so it is recommended to disable this control in this region/account and only run it in the region/account where the resource is located.", - "ReasonCode": "S3_BUCKET_CROSS_ACCOUNT_CROSS_REGION" + "ReasonCode": "S3_BUCKET_CROSS_ACCOUNT_CROSS_REGION", } - ] + ], }, "GeneratorId": "arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/2.3", "FirstObservedAt": "2020-05-20T05:02:44.203Z", "CreatedAt": "2020-05-20T05:02:44.203Z", "RecordState": "ACTIVE", "Title": "2.3 Ensure the S3 bucket used to store CloudTrail logs is not publicly accessible", - "Workflow": { - "Status": "NEW" - }, + "Workflow": {"Status": "NEW"}, "LastObservedAt": "2020-06-17T13:01:35.884Z", "Severity": { "Normalized": 90, "Label": "CRITICAL", "Product": 90, - "Original": "CRITICAL" + "Original": "CRITICAL", }, "UpdatedAt": "2020-06-17T13:01:25.561Z", "WorkflowState": "NEW", @@ -61,14 +56,14 @@ def event(): "aws/securityhub/ProductName": "Security Hub", "aws/securityhub/CompanyName": "AWS", "aws/securityhub/annotation": "The finding is in a WARNING state, because the S3 Bucket associated with this rule is in a different region/account. 
This rule does not support cross-region/cross-account checks, so it is recommended to disable this control in this region/account and only run it in the region/account where the resource is located.", - "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-2::product/aws/securityhub/arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/2.3/finding/f51c716c-b33c-4949-b748-2ffd22bdceec" + "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-2::product/aws/securityhub/arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/2.3/finding/f51c716c-b33c-4949-b748-2ffd22bdceec", }, "AwsAccountId": "111111111111", "Id": "arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/2.3/finding/f51c716c-b33c-4949-b748-2ffd22bdceec", "Remediation": { "Recommendation": { "Text": "For directions on how to fix this issue, please consult the AWS Security Hub CIS documentation.", - "Url": "https://docs.aws.amazon.com/console/securityhub/standards-cis-2.3/remediation" + "Url": "https://docs.aws.amazon.com/console/securityhub/standards-cis-2.3/remediation", } }, "Resources": [ @@ -76,53 +71,52 @@ def event(): "Partition": "aws", "Type": "AwsS3Bucket", "Region": "us-east-2", - "Id": "arn:aws:s3:::cloudtrail-awslogs-111111111111-kjfskljdfl" + "Id": "arn:aws:s3:::cloudtrail-awslogs-111111111111-kjfskljdfl", } - ] - } + ], + }, } + def expected(): return { - "account_id": '111111111111', - "resource_id": 'cloudtrail-awslogs-111111111111-kjfskljdfl', - "finding_id": 'arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/2.3/finding/f51c716c-b33c-4949-b748-2ffd22bdceec', - "product_arn": 'arn:aws:securityhub:us-east-2::product/aws/securityhub', - "control_id": '2.3', + "account_id": "111111111111", + "resource_id": "cloudtrail-awslogs-111111111111-kjfskljdfl", + "finding_id": 
"arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/2.3/finding/f51c716c-b33c-4949-b748-2ffd22bdceec", + "product_arn": "arn:aws:securityhub:us-east-2::product/aws/securityhub", + "control_id": "2.3", "object": { - "Type": 'AwsS3Bucket', - "Id": 'cloudtrail-awslogs-111111111111-kjfskljdfl', - "OutputKey": 'Remediation.Output' + "Type": "AwsS3Bucket", + "Id": "cloudtrail-awslogs-111111111111-kjfskljdfl", + "OutputKey": "Remediation.Output", }, - "matches": [ "cloudtrail-awslogs-111111111111-kjfskljdfl" ], - 'details': {}, - 'testmode': False, - 'resource': event().get('Finding').get('Resources')[0], - 'resource_region': None, - 'aws_config_rule': { + "matches": ["cloudtrail-awslogs-111111111111-kjfskljdfl"], + "details": {}, + "testmode": False, + "resource": event().get("Finding").get("Resources")[0], + "resource_region": None, + "aws_config_rule": { "ConfigRuleName": "s3-bucket-server-side-encryption-enabled", "ConfigRuleArn": "arn:aws:config:us-east-1:111111111111:config-rule/config-rule-vye3dl", "ConfigRuleId": "config-rule-vye3dl", "Description": "Checks whether the S3 bucket policy denies the put-object requests that are not encrypted using AES-256 or AWS KMS.", - "Scope": { - "ComplianceResourceTypes": [ - "AWS::S3::Bucket" - ] - }, + "Scope": {"ComplianceResourceTypes": ["AWS::S3::Bucket"]}, "Source": { "Owner": "AWS", - "SourceIdentifier": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED" + "SourceIdentifier": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED", }, "InputParameters": "{}", - "ConfigRuleState": "ACTIVE" - } + "ConfigRuleState": "ACTIVE", + }, + "input_params": {}, } + def cis41_event(): return { - 'expected_control_id': '4.1', - 'parse_id_pattern': '^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-[0-9]):[0-9]{12}:security-group/(sg-[a-f0-9]{8,17})$', - 'Finding': { + "expected_control_id": "4.1", + "parse_id_pattern": 
"^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-[0-9]):[0-9]{12}:security-group/(sg-[a-f0-9]{8,17})$", + "Finding": { "SchemaVersion": "2018-10-08", "Id": "arn:aws:securityhub:us-east-1:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/4.1/finding/f371b170-1881-4af0-9a33-840c81d91a04", "ProductArn": "arn:aws:securityhub:us-east-1::product/aws/securityhub", @@ -142,14 +136,14 @@ def cis41_event(): "Product": 70, "Label": "HIGH", "Normalized": 70, - "Original": "HIGH" + "Original": "HIGH", }, "Title": "4.1 Ensure no security groups allow ingress from 0.0.0.0/0 to port 22", "Description": "Security groups provide stateful filtering of ingress/egress network traffic to AWS resources. It is recommended that no security group allows unrestricted ingress access to port 22.", "Remediation": { "Recommendation": { - "Text": "For directions on how to fix this issue, please consult the AWS Security Hub CIS documentation.", - "Url": "https://docs.aws.amazon.com/console/securityhub/standards-cis-4.1/remediation" + "Text": "For directions on how to fix this issue, please consult the AWS Security Hub CIS documentation.", + "Url": "https://docs.aws.amazon.com/console/securityhub/standards-cis-4.1/remediation", } }, "ProductFields": { @@ -163,107 +157,91 @@ def cis41_event(): "aws/securityhub/ProductName": "Security Hub", "aws/securityhub/CompanyName": "AWS", "Resources:0/Id": "arn:aws:ec2:us-east-1:111111111111:security-group/sg-087af114e4ae4c6ea", - "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-1::product/aws/securityhub/arn:aws:securityhub:us-east-1:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/4.1/finding/f371b170-1881-4af0-9a33-840c81d91a04" + "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-1::product/aws/securityhub/arn:aws:securityhub:us-east-1:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/4.1/finding/f371b170-1881-4af0-9a33-840c81d91a04", }, "Resources": [ { - "Type": 
"AwsEc2SecurityGroup", - "Id": "arn:aws:ec2:us-east-1:111111111111:security-group/sg-087af114e4ae4c6ea", - "Partition": "aws", - "Region": "us-east-1", - "Details": { - "AwsEc2SecurityGroup": { - "GroupName": "launch-wizard-17", - "GroupId": "sg-087af114e4ae4c6ea", - "OwnerId": "111111111111", - "VpcId": "vpc-e5b8f483", - "IpPermissions": [ - { - "IpProtocol": "tcp", - "FromPort": 22, - "ToPort": 22, - "IpRanges": [ - { - "CidrIp": "0.0.0.0/0" - } - ] - } - ], - "IpPermissionsEgress": [ - { - "IpProtocol": "-1", - "IpRanges": [ - { - "CidrIp": "0.0.0.0/0" - } - ] + "Type": "AwsEc2SecurityGroup", + "Id": "arn:aws:ec2:us-east-1:111111111111:security-group/sg-087af114e4ae4c6ea", + "Partition": "aws", + "Region": "us-east-1", + "Details": { + "AwsEc2SecurityGroup": { + "GroupName": "launch-wizard-17", + "GroupId": "sg-087af114e4ae4c6ea", + "OwnerId": "111111111111", + "VpcId": "vpc-e5b8f483", + "IpPermissions": [ + { + "IpProtocol": "tcp", + "FromPort": 22, + "ToPort": 22, + "IpRanges": [{"CidrIp": "0.0.0.0/0"}], + } + ], + "IpPermissionsEgress": [ + { + "IpProtocol": "-1", + "IpRanges": [{"CidrIp": "0.0.0.0/0"}], + } + ], } - ] - } - } + }, } ], - "Compliance": { - "Status": "FAILED" - }, + "Compliance": {"Status": "FAILED"}, "WorkflowState": "NEW", - "Workflow": { - "Status": "NOTIFIED" - }, + "Workflow": {"Status": "NOTIFIED"}, "RecordState": "ACTIVE", "Note": { "Text": "Remediation failed for CIS control 4.1 in account 111111111111: No output available yet because the step is not successfully executed", "UpdatedBy": "update_text", - "UpdatedAt": "2021-07-20T18:53:07.918Z" + "UpdatedAt": "2021-07-20T18:53:07.918Z", }, "FindingProviderFields": { - "Severity": { - "Label": "HIGH", - "Original": "HIGH" - }, + "Severity": {"Label": "HIGH", "Original": "HIGH"}, "Types": [ - "Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark" - ] - } - } + "Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS 
Foundations Benchmark" + ], + }, + }, } + def cis41_expected(): return { - "account_id": '111111111111', - "resource_id": 'sg-087af114e4ae4c6ea', - 'testmode': False, - "finding_id": 'arn:aws:securityhub:us-east-1:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/4.1/finding/f371b170-1881-4af0-9a33-840c81d91a04', - "product_arn": 'arn:aws:securityhub:us-east-1::product/aws/securityhub', - "control_id": '4.1', + "account_id": "111111111111", + "resource_id": "sg-087af114e4ae4c6ea", + "testmode": False, + "finding_id": "arn:aws:securityhub:us-east-1:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/4.1/finding/f371b170-1881-4af0-9a33-840c81d91a04", + "product_arn": "arn:aws:securityhub:us-east-1::product/aws/securityhub", + "control_id": "4.1", "object": { - "Type": 'AwsEc2SecurityGroup', - "Id": 'sg-087af114e4ae4c6ea', - "OutputKey": 'Remediation.Output' + "Type": "AwsEc2SecurityGroup", + "Id": "sg-087af114e4ae4c6ea", + "OutputKey": "Remediation.Output", }, - "matches": [ "sg-087af114e4ae4c6ea" ], - 'details': cis41_event().get('Finding').get('Resources')[0].get('Details'), - 'resource': cis41_event().get('Finding').get('Resources')[0], - 'resource_region': 'us-east-1', - 'aws_config_rule': { + "matches": ["sg-087af114e4ae4c6ea"], + "details": cis41_event().get("Finding").get("Resources")[0].get("Details"), + "resource": cis41_event().get("Finding").get("Resources")[0], + "resource_region": "us-east-1", + "aws_config_rule": { "ConfigRuleName": "s3-bucket-server-side-encryption-enabled", "ConfigRuleArn": "arn:aws:config:us-east-1:111111111111:config-rule/config-rule-vye3dl", "ConfigRuleId": "config-rule-vye3dl", "Description": "Checks whether the S3 bucket policy denies the put-object requests that are not encrypted using AES-256 or AWS KMS.", - "Scope": { - "ComplianceResourceTypes": [ - "AWS::S3::Bucket" - ] - }, + "Scope": {"ComplianceResourceTypes": ["AWS::S3::Bucket"]}, "Source": { "Owner": "AWS", - "SourceIdentifier": 
"S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED" + "SourceIdentifier": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED", }, "InputParameters": "{}", - "ConfigRuleState": "ACTIVE" - } + "ConfigRuleState": "ACTIVE", + }, + "input_params": {}, } + def config_rule(): return { "ConfigRules": [ @@ -272,151 +250,174 @@ def config_rule(): "ConfigRuleArn": "arn:aws:config:us-east-1:111111111111:config-rule/config-rule-vye3dl", "ConfigRuleId": "config-rule-vye3dl", "Description": "Checks whether the S3 bucket policy denies the put-object requests that are not encrypted using AES-256 or AWS KMS.", - "Scope": { - "ComplianceResourceTypes": [ - "AWS::S3::Bucket" - ] - }, + "Scope": {"ComplianceResourceTypes": ["AWS::S3::Bucket"]}, "Source": { "Owner": "AWS", - "SourceIdentifier": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED" + "SourceIdentifier": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED", }, "InputParameters": "{}", - "ConfigRuleState": "ACTIVE" + "ConfigRuleState": "ACTIVE", } ] } + def ssm_parm(): return { - 'Parameter': { - 'Name': 'Solutions/SO0111/member_version', - 'Type': 'String', - 'Value': 'v1.5.0' + "Parameter": { + "Name": "Solutions/SO0111/member_version", + "Type": "String", + "Value": "v1.5.0", } } -BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - } -) + +BOTO_CONFIG = Config(retries={"mode": "standard"}) + @pytest.fixture(autouse=True) def run_before_and_after_tests(mocker): - cfg_client = botocore.session.get_session().create_client('config', config=BOTO_CONFIG) - cfg_stubber = Stubber(cfg_client) - cfg_stubber.add_response( - 'describe_config_rules', - config_rule() + cfg_client = botocore.session.get_session().create_client( + "config", config=BOTO_CONFIG ) + cfg_stubber = Stubber(cfg_client) + cfg_stubber.add_response("describe_config_rules", config_rule()) cfg_stubber.activate() - mocker.patch('parse_input.connect_to_config', return_value=cfg_client) + mocker.patch("parse_input.connect_to_config", return_value=cfg_client) - ssm_client = 
botocore.session.get_session().create_client('ssm', config=BOTO_CONFIG) + ssm_client = botocore.session.get_session().create_client("ssm", config=BOTO_CONFIG) ssm_stubber = Stubber(ssm_client) - ssm_stubber.add_response( - 'get_parameter', - ssm_parm() - ) + ssm_stubber.add_response("get_parameter", ssm_parm()) ssm_stubber.activate() - mocker.patch('parse_input.connect_to_ssm', return_value=ssm_client) + mocker.patch("parse_input.connect_to_ssm", return_value=ssm_client) yield cfg_stubber.deactivate() ssm_stubber.deactivate() + def test_parse_event(mocker): expected_result = expected() - expected_result['finding'] = event().get('Finding') + expected_result["finding"] = event().get("Finding") parsed_event = parse_event(event(), {}) assert parsed_event == expected_result + def test_parse_cis41(mocker): expected_result = cis41_expected() - expected_result['finding'] = cis41_event().get('Finding') + expected_result["finding"] = cis41_event().get("Finding") parsed_event = parse_event(cis41_event(), {}) assert parsed_event == expected_result + def test_parse_event_multimatch(mocker): expected_result = expected() - expected_result['finding'] = event().get('Finding') - expected_result['matches'] = [ - "aws", - "cloudtrail-awslogs-111111111111-kjfskljdfl" - ] + expected_result["finding"] = event().get("Finding") + expected_result["matches"] = ["aws", "cloudtrail-awslogs-111111111111-kjfskljdfl"] test_event = event() - test_event['resource_index'] = 2 - test_event['parse_id_pattern'] = '^arn:((?:aws|aws-cn|aws-us-gov)):s3:::([A-Za-z0-9.-]{3,63})$' + test_event["resource_index"] = 2 + test_event["parse_id_pattern"] = ( + "^arn:((?:aws|aws-cn|aws-us-gov)):s3:::([A-Za-z0-9.-]{3,63})$" + ) parsed_event = parse_event(test_event, {}) assert parsed_event == expected_result + def test_bad_finding_id(mocker): test_event = event() - test_event['Finding']['Id'] = "badvalue" + test_event["Finding"]["Id"] = "badvalue" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = 
parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Finding Id is invalid: badvalue' + assert pytest_wrapped_e.value.code == "ERROR: Finding Id is invalid: badvalue" + def test_bad_control_id(mocker): test_event = event() - test_event['Finding']['Id'] = "arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0//finding/f51c716c-b33c-4949-b748-2ffd22bdceec" + test_event["Finding"][ + "Id" + ] = "arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0//finding/f51c716c-b33c-4949-b748-2ffd22bdceec" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Finding Id is invalid: arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0//finding/f51c716c-b33c-4949-b748-2ffd22bdceec - missing Control Id' + assert ( + pytest_wrapped_e.value.code + == "ERROR: Finding Id is invalid: arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0//finding/f51c716c-b33c-4949-b748-2ffd22bdceec - missing Control Id" + ) + def test_control_id_nomatch(mocker): test_event = event() - test_event['Finding']['Id'] = "arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/2.4/finding/f51c716c-b33c-4949-b748-2ffd22bdceec" + test_event["Finding"][ + "Id" + ] = "arn:aws:securityhub:us-east-2:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0/2.4/finding/f51c716c-b33c-4949-b748-2ffd22bdceec" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Control Id from input (2.4) does not match 
2.3' + assert ( + pytest_wrapped_e.value.code + == "ERROR: Control Id from input (2.4) does not match 2.3" + ) + def test_bad_account_id(mocker): test_event = event() - test_event['Finding']['AwsAccountId'] = "1234123412345" + test_event["Finding"]["AwsAccountId"] = "1234123412345" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: AwsAccountId is invalid: 1234123412345' + assert ( + pytest_wrapped_e.value.code == "ERROR: AwsAccountId is invalid: 1234123412345" + ) + def test_bad_productarn(mocker): test_event = event() - test_event['Finding']['ProductArn'] = "badvalue" + test_event["Finding"]["ProductArn"] = "badvalue" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: ProductArn is invalid: badvalue' + assert pytest_wrapped_e.value.code == "ERROR: ProductArn is invalid: badvalue" + def test_bad_resource_match(mocker): test_event = event() - test_event['parse_id_pattern'] = '^arn:(?:aws|aws-cn|aws-us-gov):logs:::([A-Za-z0-9.-]{3,63})$' + test_event["parse_id_pattern"] = ( + "^arn:(?:aws|aws-cn|aws-us-gov):logs:::([A-Za-z0-9.-]{3,63})$" + ) with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Invalid resource Id arn:aws:s3:::cloudtrail-awslogs-111111111111-kjfskljdfl' + assert ( + pytest_wrapped_e.value.code + == "ERROR: Invalid resource Id arn:aws:s3:::cloudtrail-awslogs-111111111111-kjfskljdfl" + ) + def test_no_resource_pattern(mocker): test_event = event() expected_result = expected() - expected_result['finding'] = event().get('Finding') - test_event['parse_id_pattern'] = 
'' - expected_result['resource_id'] = 'arn:aws:s3:::cloudtrail-awslogs-111111111111-kjfskljdfl' - expected_result['matches'] = [] - expected_result['object']['Id'] = expected_result['resource_id'] + expected_result["finding"] = event().get("Finding") + test_event["parse_id_pattern"] = "" + expected_result["resource_id"] = ( + "arn:aws:s3:::cloudtrail-awslogs-111111111111-kjfskljdfl" + ) + expected_result["matches"] = [] + expected_result["object"]["Id"] = expected_result["resource_id"] parsed_event = parse_event(test_event, {}) assert parsed_event == expected_result + def test_no_resource_pattern_no_resource_id(mocker): test_event = event() - test_event['parse_id_pattern'] = '' - test_event['Finding']['Resources'][0]['Id'] = '' + test_event["parse_id_pattern"] = "" + test_event["Finding"]["Resources"][0]["Id"] = "" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Resource Id is missing from the finding json Resources (Id)' + assert ( + pytest_wrapped_e.value.code + == "ERROR: Resource Id is missing from the finding json Resources (Id)" + ) diff --git a/source/playbooks/common/test/test_cloudwatch_get_input_values.py b/source/playbooks/common/test/test_cloudwatch_get_input_values.py index 9a5f4a39..2f9d2aa2 100644 --- a/source/playbooks/common/test/test_cloudwatch_get_input_values.py +++ b/source/playbooks/common/test/test_cloudwatch_get_input_values.py @@ -1,23 +1,60 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import pytest +from cloudwatch_get_input_values import routeTableChangesFilter, verify -from cloudwatch_get_input_values import verify, routeTableChangesFilter def expected(): return routeTableChangesFilter + def test_verifyCIS120(): - assert verify({'ControlId': '3.13', 'StandardLongName': 'cis-aws-foundations-benchmark', 'StandardVersion': '1.2.0' }, {}) == expected() + assert ( + verify( + { + "ControlId": "3.13", + "StandardLongName": "cis-aws-foundations-benchmark", + "StandardVersion": "1.2.0", + }, + {}, + ) + == expected() + ) + def test_verifyCIS140(): - assert verify({'ControlId': '4.13', 'StandardLongName': 'cis-aws-foundations-benchmark', 'StandardVersion': '1.4.0' }, {}) == expected() + assert ( + verify( + { + "ControlId": "4.13", + "StandardLongName": "cis-aws-foundations-benchmark", + "StandardVersion": "1.4.0", + }, + {}, + ) + == expected() + ) + def test_verifySC(): - assert verify({'ControlId': 'CloudWatch.13', 'StandardLongName': 'security-control', 'StandardVersion': '2.0.0', }, {}) == expected() + assert ( + verify( + { + "ControlId": "CloudWatch.13", + "StandardLongName": "security-control", + "StandardVersion": "2.0.0", + }, + {}, + ) + == expected() + ) + def test_failNoStandard(): with pytest.raises(SystemExit) as response: - verify({'ControlId': '3.13' }, {}) - - assert response.value.code == "ERROR: Could not find associated metric filter. Missing parameter: 'StandardLongName'" + verify({"ControlId": "3.13"}, {}) + + assert ( + response.value.code + == "ERROR: Could not find associated metric filter. Missing parameter: 'StandardLongName'" + ) diff --git a/source/playbooks/common/test/test_deserialize_json.py b/source/playbooks/common/test/test_deserialize_json.py index f09986fc..c83fda94 100644 --- a/source/playbooks/common/test/test_deserialize_json.py +++ b/source/playbooks/common/test/test_deserialize_json.py @@ -1,13 +1,14 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import json + from deserialize_json import event_handler + def event(object): - return { - 'SerializedJson': json.dumps(object) - } + return {"SerializedJson": json.dumps(object)} + def test_deserialize(): - object = {'MinRetentionPeriod': '7'} + object = {"MinRetentionPeriod": "7"} assert event_handler(event(object), {}) == object diff --git a/source/playbooks/common/test/test_pci321_parse.py b/source/playbooks/common/test/test_pci321_parse.py index 56c4ace1..a5c1a1a0 100644 --- a/source/playbooks/common/test/test_pci321_parse.py +++ b/source/playbooks/common/test/test_pci321_parse.py @@ -1,20 +1,17 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import boto3 -import json import botocore.session -from botocore.stub import Stubber -from botocore.config import Config import pytest -from pytest_mock import mocker - +from botocore.config import Config +from botocore.stub import Stubber from parse_input import parse_event + def event(): return { - 'expected_control_id': 'PCI.IAM.6', - 'parse_id_pattern': '^arn:aws:iam::[0-9]{12}:user/([A-Za-z0-9+=,.@-]{1,64})$', - 'Finding': { + "expected_control_id": "PCI.IAM.6", + "parse_id_pattern": "^arn:aws:iam::[0-9]{12}:user/([A-Za-z0-9+=,.@-]{1,64})$", + "Finding": { "SchemaVersion": "2018-10-08", "Id": "arn:aws:securityhub:us-east-1:111111111111:subscription/pci-dss/v/3.2.1/PCI.IAM.6/finding/fec91aaf-5016-4c40-9d24-9966e4be80c4", "ProductArn": "arn:aws:securityhub:us-east-1::product/aws/securityhub", @@ -31,14 +28,14 @@ def event(): "Product": 40, "Label": "MEDIUM", "Normalized": 40, - "Original": "MEDIUM" + "Original": "MEDIUM", }, "Title": "PCI.IAM.6 MFA should be enabled for all IAM users", "Description": "This AWS control checks whether the AWS Identity and Access Management users have multi-factor authentication (MFA) enabled.", "Remediation": { "Recommendation": { - "Text": "For directions on how to fix this issue, please 
consult the AWS Security Hub PCI DSS documentation.", - "Url": "https://docs.aws.amazon.com/console/securityhub/PCI.IAM.6/remediation" + "Text": "For directions on how to fix this issue, please consult the AWS Security Hub PCI DSS documentation.", + "Url": "https://docs.aws.amazon.com/console/securityhub/PCI.IAM.6/remediation", } }, "ProductFields": { @@ -52,63 +49,64 @@ def event(): "aws/securityhub/ProductName": "Security Hub", "aws/securityhub/CompanyName": "AWS", "Resources:0/Id": "arn:aws:iam::111111111111:user/foo-bar@baz", - "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-1::product/aws/securityhub/arn:aws:securityhub:us-east-1:111111111111:subscription/pci-dss/v/3.2.1/PCI.IAM.6/finding/fec91aaf-5016-4c40-9d24-9966e4be80c4" + "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-1::product/aws/securityhub/arn:aws:securityhub:us-east-1:111111111111:subscription/pci-dss/v/3.2.1/PCI.IAM.6/finding/fec91aaf-5016-4c40-9d24-9966e4be80c4", }, "Resources": [ { - "Type": "AwsIamUser", - "Id": "arn:aws:iam::111111111111:user/foo-bar@baz", - "Partition": "aws", - "Region": "us-east-1", - "Details": { - "AwsIamUser": { - "CreateDate": "2016-09-23T12:42:13.000Z", - "Path": "/", - "UserId": "AIDAIMALBCBBI4ZZHJVTO", - "UserName": "foo-bar@baz" - } - } + "Type": "AwsIamUser", + "Id": "arn:aws:iam::111111111111:user/foo-bar@baz", + "Partition": "aws", + "Region": "us-east-1", + "Details": { + "AwsIamUser": { + "CreateDate": "2016-09-23T12:42:13.000Z", + "Path": "/", + "UserId": "AIDAIMALBCBBI4ZZHJVTO", + "UserName": "foo-bar@baz", + } + }, } ], "Compliance": { "Status": "FAILED", - "RelatedRequirements": [ - "PCI DSS 8.3.1" - ] + "RelatedRequirements": ["PCI DSS 8.3.1"], }, "WorkflowState": "NEW", - "Workflow": { - "Status": "NEW" - }, + "Workflow": {"Status": "NEW"}, "RecordState": "ACTIVE", "FindingProviderFields": { - "Severity": { - "Label": "MEDIUM", - "Original": "MEDIUM" - }, + "Severity": {"Label": "MEDIUM", "Original": "MEDIUM"}, "Types": [ - "Software 
and Configuration Checks/Industry and Regulatory Standards/PCI-DSS" - ] - } - } + "Software and Configuration Checks/Industry and Regulatory Standards/PCI-DSS" + ], + }, + }, } + def expected(): return { - "account_id": '111111111111', - "resource_id": 'foo-bar@baz', - 'control_id': 'PCI.IAM.6', - 'testmode': False, - "finding_id": 'arn:aws:securityhub:us-east-1:111111111111:subscription/pci-dss/v/3.2.1/PCI.IAM.6/finding/fec91aaf-5016-4c40-9d24-9966e4be80c4', - "product_arn": 'arn:aws:securityhub:us-east-1::product/aws/securityhub', + "account_id": "111111111111", + "resource_id": "foo-bar@baz", + "control_id": "PCI.IAM.6", + "testmode": False, + "finding_id": "arn:aws:securityhub:us-east-1:111111111111:subscription/pci-dss/v/3.2.1/PCI.IAM.6/finding/fec91aaf-5016-4c40-9d24-9966e4be80c4", + "product_arn": "arn:aws:securityhub:us-east-1::product/aws/securityhub", "object": { - "Type": 'AwsIamUser', - "Id": 'foo-bar@baz', - "OutputKey": 'Remediation.Output' + "Type": "AwsIamUser", + "Id": "foo-bar@baz", + "OutputKey": "Remediation.Output", }, - "matches": [ "foo-bar@baz" ], - 'details': {'AwsIamUser': {'CreateDate': '2016-09-23T12:42:13.000Z', 'Path': '/', 'UserId': 'AIDAIMALBCBBI4ZZHJVTO', 'UserName': 'foo-bar@baz'}}, - 'resource': { + "matches": ["foo-bar@baz"], + "details": { + "AwsIamUser": { + "CreateDate": "2016-09-23T12:42:13.000Z", + "Path": "/", + "UserId": "AIDAIMALBCBBI4ZZHJVTO", + "UserName": "foo-bar@baz", + } + }, + "resource": { "Type": "AwsIamUser", "Id": "arn:aws:iam::111111111111:user/foo-bar@baz", "Partition": "aws", @@ -118,30 +116,28 @@ def expected(): "CreateDate": "2016-09-23T12:42:13.000Z", "Path": "/", "UserId": "AIDAIMALBCBBI4ZZHJVTO", - "UserName": "foo-bar@baz" - } + "UserName": "foo-bar@baz", } }, + }, "resource_region": None, - 'aws_config_rule': { + "aws_config_rule": { "ConfigRuleName": "s3-bucket-server-side-encryption-enabled", "ConfigRuleArn": "arn:aws:config:us-east-1:111111111111:config-rule/config-rule-vye3dl", "ConfigRuleId": 
"config-rule-vye3dl", "Description": "Checks whether the S3 bucket policy denies the put-object requests that are not encrypted using AES-256 or AWS KMS.", - "Scope": { - "ComplianceResourceTypes": [ - "AWS::S3::Bucket" - ] - }, + "Scope": {"ComplianceResourceTypes": ["AWS::S3::Bucket"]}, "Source": { "Owner": "AWS", - "SourceIdentifier": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED" + "SourceIdentifier": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED", }, "InputParameters": "{}", - "ConfigRuleState": "ACTIVE" - } + "ConfigRuleState": "ACTIVE", + }, + "input_params": {}, } + def config_rule(): return { "ConfigRules": [ @@ -150,144 +146,165 @@ def config_rule(): "ConfigRuleArn": "arn:aws:config:us-east-1:111111111111:config-rule/config-rule-vye3dl", "ConfigRuleId": "config-rule-vye3dl", "Description": "Checks whether the S3 bucket policy denies the put-object requests that are not encrypted using AES-256 or AWS KMS.", - "Scope": { - "ComplianceResourceTypes": [ - "AWS::S3::Bucket" - ] - }, + "Scope": {"ComplianceResourceTypes": ["AWS::S3::Bucket"]}, "Source": { "Owner": "AWS", - "SourceIdentifier": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED" + "SourceIdentifier": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED", }, "InputParameters": "{}", - "ConfigRuleState": "ACTIVE" + "ConfigRuleState": "ACTIVE", } ] } + def ssm_parm(): return { - 'Parameter': { - 'Name': 'Solutions/SO0111/member_version', - 'Type': 'String', - 'Value': 'v1.5.0' + "Parameter": { + "Name": "Solutions/SO0111/member_version", + "Type": "String", + "Value": "v1.5.0", } } -BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - } -) + + +BOTO_CONFIG = Config(retries={"mode": "standard"}) + @pytest.fixture(autouse=True) def run_before_and_after_tests(mocker): - cfg_client = botocore.session.get_session().create_client('config', config=BOTO_CONFIG) - cfg_stubber = Stubber(cfg_client) - cfg_stubber.add_response( - 'describe_config_rules', - config_rule() + cfg_client = botocore.session.get_session().create_client( + 
"config", config=BOTO_CONFIG ) + cfg_stubber = Stubber(cfg_client) + cfg_stubber.add_response("describe_config_rules", config_rule()) cfg_stubber.activate() - mocker.patch('parse_input.connect_to_config', return_value=cfg_client) + mocker.patch("parse_input.connect_to_config", return_value=cfg_client) - ssm_client = botocore.session.get_session().create_client('ssm', config=BOTO_CONFIG) + ssm_client = botocore.session.get_session().create_client("ssm", config=BOTO_CONFIG) ssm_stubber = Stubber(ssm_client) - ssm_stubber.add_response( - 'get_parameter', - ssm_parm() - ) + ssm_stubber.add_response("get_parameter", ssm_parm()) ssm_stubber.activate() - mocker.patch('parse_input.connect_to_ssm', return_value=ssm_client) + mocker.patch("parse_input.connect_to_ssm", return_value=ssm_client) yield cfg_stubber.deactivate() ssm_stubber.deactivate() + def test_parse_event(mocker): expected_result = expected() - expected_result['finding'] = event().get('Finding') + expected_result["finding"] = event().get("Finding") parsed_event = parse_event(event(), {}) assert parsed_event == expected_result + def test_parse_event_multimatch(mocker): expected_result = expected() - expected_result['finding'] = event().get('Finding') - expected_result['matches'] = [ - "iam", - "foo-bar@baz" - ] + expected_result["finding"] = event().get("Finding") + expected_result["matches"] = ["iam", "foo-bar@baz"] test_event = event() - test_event['resource_index'] = 2 - test_event['parse_id_pattern'] = '^arn:aws:(.*?)::[0-9]{12}:user/([A-Za-z0-9+=,.@-]{1,64})$' + test_event["resource_index"] = 2 + test_event["parse_id_pattern"] = ( + "^arn:aws:(.*?)::[0-9]{12}:user/([A-Za-z0-9+=,.@-]{1,64})$" + ) parsed_event = parse_event(test_event, {}) assert parsed_event == expected_result + def test_bad_finding_id(mocker): test_event = event() - test_event['Finding']['Id'] = "badvalue" + test_event["Finding"]["Id"] = "badvalue" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, 
{}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Finding Id is invalid: badvalue' + assert pytest_wrapped_e.value.code == "ERROR: Finding Id is invalid: badvalue" + def test_bad_control_id(mocker): test_event = event() - test_event['Finding']['Id'] = "arn:aws:securityhub:us-east-1:111111111111:subscription/pci-dss/v/3.2.1//finding/fec91aaf-5016-4c40-9d24-9966e4be80c4" + test_event["Finding"][ + "Id" + ] = "arn:aws:securityhub:us-east-1:111111111111:subscription/pci-dss/v/3.2.1//finding/fec91aaf-5016-4c40-9d24-9966e4be80c4" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Finding Id is invalid: arn:aws:securityhub:us-east-1:111111111111:subscription/pci-dss/v/3.2.1//finding/fec91aaf-5016-4c40-9d24-9966e4be80c4 - missing Control Id' + assert ( + pytest_wrapped_e.value.code + == "ERROR: Finding Id is invalid: arn:aws:securityhub:us-east-1:111111111111:subscription/pci-dss/v/3.2.1//finding/fec91aaf-5016-4c40-9d24-9966e4be80c4 - missing Control Id" + ) + def test_control_id_nomatch(mocker): test_event = event() - test_event['Finding']['Id'] = "arn:aws:securityhub:us-east-2:111111111111:subscription/pci-dss/v/3.2.1/2.4/finding/fec91aaf-5016-4c40-9d24-9966e4be80c4" + test_event["Finding"][ + "Id" + ] = "arn:aws:securityhub:us-east-2:111111111111:subscription/pci-dss/v/3.2.1/2.4/finding/fec91aaf-5016-4c40-9d24-9966e4be80c4" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Control Id from input (2.4) does not match PCI.IAM.6' + assert ( + pytest_wrapped_e.value.code + == "ERROR: Control Id from input (2.4) does not match PCI.IAM.6" + ) + def 
test_bad_account_id(mocker): test_event = event() - test_event['Finding']['AwsAccountId'] = "1234123412345" + test_event["Finding"]["AwsAccountId"] = "1234123412345" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: AwsAccountId is invalid: 1234123412345' + assert ( + pytest_wrapped_e.value.code == "ERROR: AwsAccountId is invalid: 1234123412345" + ) + def test_bad_productarn(mocker): test_event = event() - test_event['Finding']['ProductArn'] = "badvalue" + test_event["Finding"]["ProductArn"] = "badvalue" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: ProductArn is invalid: badvalue' + assert pytest_wrapped_e.value.code == "ERROR: ProductArn is invalid: badvalue" + def test_bad_resource_match(mocker): test_event = event() - test_event['parse_id_pattern'] = '^arn:(?:aws|aws-cn|aws-us-gov):logs:::([A-Za-z0-9.-]{3,63})$' + test_event["parse_id_pattern"] = ( + "^arn:(?:aws|aws-cn|aws-us-gov):logs:::([A-Za-z0-9.-]{3,63})$" + ) with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Invalid resource Id arn:aws:iam::111111111111:user/foo-bar@baz' + assert ( + pytest_wrapped_e.value.code + == "ERROR: Invalid resource Id arn:aws:iam::111111111111:user/foo-bar@baz" + ) + def test_no_resource_pattern(mocker): test_event = event() expected_result = expected() - expected_result['finding'] = event().get('Finding') - test_event['parse_id_pattern'] = '' - expected_result['resource_id'] = 'arn:aws:iam::111111111111:user/foo-bar@baz' - expected_result['matches'] = [] - 
expected_result['object']['Id'] = expected_result['resource_id'] + expected_result["finding"] = event().get("Finding") + test_event["parse_id_pattern"] = "" + expected_result["resource_id"] = "arn:aws:iam::111111111111:user/foo-bar@baz" + expected_result["matches"] = [] + expected_result["object"]["Id"] = expected_result["resource_id"] parsed_event = parse_event(test_event, {}) assert parsed_event == expected_result + def test_no_resource_pattern_no_resource_id(mocker): test_event = event() - test_event['parse_id_pattern'] = '' - test_event['Finding']['Resources'][0]['Id'] = '' + test_event["parse_id_pattern"] = "" + test_event["Finding"]["Resources"][0]["Id"] = "" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Resource Id is missing from the finding json Resources (Id)' + assert ( + pytest_wrapped_e.value.code + == "ERROR: Resource Id is missing from the finding json Resources (Id)" + ) diff --git a/source/playbooks/common/test/test_sc_parse.py b/source/playbooks/common/test/test_sc_parse.py index 13bb1692..937c4ac0 100644 --- a/source/playbooks/common/test/test_sc_parse.py +++ b/source/playbooks/common/test/test_sc_parse.py @@ -1,20 +1,17 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import boto3 -import json import botocore.session -from botocore.stub import Stubber -from botocore.config import Config import pytest -from pytest_mock import mocker - +from botocore.config import Config +from botocore.stub import Stubber from parse_input import parse_event + def event(): return { - 'expected_control_id': 'S3.12', - 'parse_id_pattern': '^arn:aws:s3:::(.*)$', - 'Finding': { + "expected_control_id": "S3.12", + "parse_id_pattern": "^arn:aws:s3:::(.*)$", + "Finding": { "SchemaVersion": "2018-10-08", "Id": "arn:aws:securityhub:us-east-1:111111111111:security-control/S3.12/finding/c5dcc868-c633-448d-92c7-bb19bbdcfe00", "ProductArn": "arn:aws:securityhub:us-east-1::product/aws/securityhub", @@ -33,14 +30,14 @@ def event(): "Severity": { "Label": "INFORMATIONAL", "Normalized": 0, - "Original": "INFORMATIONAL" + "Original": "INFORMATIONAL", }, "Title": "S3 access control lists (ACLs) should not be used to manage user access to buckets", "Description": "This control checks if S3 buckets allow user permissions via access control lists (ACLs). 
This control fails if ACLs are configured for user access on S3 Bucket.", "Remediation": { "Recommendation": { - "Text": "For information on how to correct this issue, consult the AWS Security Hub controls documentation.", - "Url": "https://docs.aws.amazon.com/console/securityhub/S3.12/remediation" + "Text": "For information on how to correct this issue, consult the AWS Security Hub controls documentation.", + "Url": "https://docs.aws.amazon.com/console/securityhub/S3.12/remediation", } }, "ProductFields": { @@ -49,85 +46,84 @@ def event(): "aws/securityhub/ProductName": "Security Hub", "aws/securityhub/CompanyName": "AWS", "Resources:0/Id": "arn:aws:s3:::asr-scv-reference", - "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-1::product/aws/securityhub/arn:aws:securityhub:us-east-1:111111111111:security-control/S3.12/finding/c5dcc868-c633-448d-92c7-bb19bbdcfe00" + "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-1::product/aws/securityhub/arn:aws:securityhub:us-east-1:111111111111:security-control/S3.12/finding/c5dcc868-c633-448d-92c7-bb19bbdcfe00", }, "Resources": [ { - "Type": "AwsS3Bucket", - "Id": "arn:aws:s3:::asr-scv-reference", - "Partition": "aws", - "Region": "us-east-1", - "Details": { - "AwsS3Bucket": { - "OwnerId": "93b93c44fd03f06ac297d9923da9bf86507301cfc9485b1d29c992241afd5182", - "CreatedAt": "2022-10-27T20:32:54.000Z" - } - } + "Type": "AwsS3Bucket", + "Id": "arn:aws:s3:::asr-scv-reference", + "Partition": "aws", + "Region": "us-east-1", + "Details": { + "AwsS3Bucket": { + "OwnerId": "93b93c44fd03f06ac297d9923da9bf86507301cfc9485b1d29c992241afd5182", + "CreatedAt": "2022-10-27T20:32:54.000Z", + } + }, } ], "Compliance": { "Status": "PASSED", "SecurityControlId": "S3.12", "AssociatedStandards": [ - { - "StandardsId": "standards/aws-foundational-security-best-practices/v/1.0.0" - } - ] + { + "StandardsId": "standards/aws-foundational-security-best-practices/v/1.0.0" + } + ], }, "WorkflowState": "NEW", - "Workflow": { - "Status": 
"NEW" - }, + "Workflow": {"Status": "NEW"}, "RecordState": "ACTIVE", "FindingProviderFields": { - "Severity": { - "Label": "INFORMATIONAL", - "Original": "INFORMATIONAL" - }, + "Severity": {"Label": "INFORMATIONAL", "Original": "INFORMATIONAL"}, "Types": [ - "Software and Configuration Checks/Industry and Regulatory Standards" - ] + "Software and Configuration Checks/Industry and Regulatory Standards" + ], }, - "ProcessedAt": "2023-02-24T16:33:23.639Z" - } + "ProcessedAt": "2023-02-24T16:33:23.639Z", + }, } + def expected(): return { - "account_id": '111111111111', - "resource_id": 'asr-scv-reference', - 'control_id': 'S3.12', - 'testmode': False, - "finding_id": 'arn:aws:securityhub:us-east-1:111111111111:security-control/S3.12/finding/c5dcc868-c633-448d-92c7-bb19bbdcfe00', - "product_arn": 'arn:aws:securityhub:us-east-1::product/aws/securityhub', + "account_id": "111111111111", + "resource_id": "asr-scv-reference", + "control_id": "S3.12", + "testmode": False, + "finding_id": "arn:aws:securityhub:us-east-1:111111111111:security-control/S3.12/finding/c5dcc868-c633-448d-92c7-bb19bbdcfe00", + "product_arn": "arn:aws:securityhub:us-east-1::product/aws/securityhub", "object": { - "Type": 'AwsS3Bucket', - "Id": 'asr-scv-reference', - "OutputKey": 'Remediation.Output' + "Type": "AwsS3Bucket", + "Id": "asr-scv-reference", + "OutputKey": "Remediation.Output", + }, + "matches": ["asr-scv-reference"], + "details": { + "AwsS3Bucket": { + "CreatedAt": "2022-10-27T20:32:54.000Z", + "OwnerId": "93b93c44fd03f06ac297d9923da9bf86507301cfc9485b1d29c992241afd5182", + } }, - "matches": [ "asr-scv-reference" ], - 'details': {'AwsS3Bucket': {'CreatedAt': '2022-10-27T20:32:54.000Z','OwnerId': '93b93c44fd03f06ac297d9923da9bf86507301cfc9485b1d29c992241afd5182'}}, - 'resource': event().get('Finding').get('Resources')[0], + "resource": event().get("Finding").get("Resources")[0], "resource_region": None, - 'aws_config_rule': { + "aws_config_rule": { "ConfigRuleName": 
"s3-bucket-server-side-encryption-enabled", "ConfigRuleArn": "arn:aws:config:us-east-1:111111111111:config-rule/config-rule-vye3dl", "ConfigRuleId": "config-rule-vye3dl", "Description": "Checks whether the S3 bucket policy denies the put-object requests that are not encrypted using AES-256 or AWS KMS.", - "Scope": { - "ComplianceResourceTypes": [ - "AWS::S3::Bucket" - ] - }, + "Scope": {"ComplianceResourceTypes": ["AWS::S3::Bucket"]}, "Source": { "Owner": "AWS", - "SourceIdentifier": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED" + "SourceIdentifier": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED", }, "InputParameters": "{}", - "ConfigRuleState": "ACTIVE" - } + "ConfigRuleState": "ACTIVE", + }, + "input_params": {}, } + def config_rule(): return { "ConfigRules": [ @@ -136,144 +132,163 @@ def config_rule(): "ConfigRuleArn": "arn:aws:config:us-east-1:111111111111:config-rule/config-rule-vye3dl", "ConfigRuleId": "config-rule-vye3dl", "Description": "Checks whether the S3 bucket policy denies the put-object requests that are not encrypted using AES-256 or AWS KMS.", - "Scope": { - "ComplianceResourceTypes": [ - "AWS::S3::Bucket" - ] - }, + "Scope": {"ComplianceResourceTypes": ["AWS::S3::Bucket"]}, "Source": { "Owner": "AWS", - "SourceIdentifier": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED" + "SourceIdentifier": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED", }, "InputParameters": "{}", - "ConfigRuleState": "ACTIVE" + "ConfigRuleState": "ACTIVE", } ] } + def ssm_parm(): return { - 'Parameter': { - 'Name': 'Solutions/SO0111/member_version', - 'Type': 'String', - 'Value': 'v1.5.0' + "Parameter": { + "Name": "Solutions/SO0111/member_version", + "Type": "String", + "Value": "v1.5.0", } } -BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - } -) + + +BOTO_CONFIG = Config(retries={"mode": "standard"}) + @pytest.fixture(autouse=True) def run_before_and_after_tests(mocker): - cfg_client = botocore.session.get_session().create_client('config', config=BOTO_CONFIG) - cfg_stubber = 
Stubber(cfg_client) - cfg_stubber.add_response( - 'describe_config_rules', - config_rule() + cfg_client = botocore.session.get_session().create_client( + "config", config=BOTO_CONFIG ) + cfg_stubber = Stubber(cfg_client) + cfg_stubber.add_response("describe_config_rules", config_rule()) cfg_stubber.activate() - mocker.patch('parse_input.connect_to_config', return_value=cfg_client) + mocker.patch("parse_input.connect_to_config", return_value=cfg_client) - ssm_client = botocore.session.get_session().create_client('ssm', config=BOTO_CONFIG) + ssm_client = botocore.session.get_session().create_client("ssm", config=BOTO_CONFIG) ssm_stubber = Stubber(ssm_client) - ssm_stubber.add_response( - 'get_parameter', - ssm_parm() - ) + ssm_stubber.add_response("get_parameter", ssm_parm()) ssm_stubber.activate() - mocker.patch('parse_input.connect_to_ssm', return_value=ssm_client) + mocker.patch("parse_input.connect_to_ssm", return_value=ssm_client) yield cfg_stubber.deactivate() ssm_stubber.deactivate() + def test_parse_event(mocker): expected_result = expected() - expected_result['finding'] = event().get('Finding') + expected_result["finding"] = event().get("Finding") parsed_event = parse_event(event(), {}) assert parsed_event == expected_result + def test_parse_event_multimatch(mocker): expected_result = expected() - expected_result['finding'] = event().get('Finding') - expected_result['matches'] = [ - "s3", - "asr-scv-reference" - ] + expected_result["finding"] = event().get("Finding") + expected_result["matches"] = ["s3", "asr-scv-reference"] test_event = event() - test_event['resource_index'] = 2 - test_event['parse_id_pattern'] = '^arn:aws:(.*):::(.*)$' + test_event["resource_index"] = 2 + test_event["parse_id_pattern"] = "^arn:aws:(.*):::(.*)$" parsed_event = parse_event(test_event, {}) assert parsed_event == expected_result + def test_bad_finding_id(mocker): test_event = event() - test_event['Finding']['Id'] = "badvalue" + test_event["Finding"]["Id"] = "badvalue" with 
pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Finding Id is invalid: badvalue' + assert pytest_wrapped_e.value.code == "ERROR: Finding Id is invalid: badvalue" + def test_bad_control_id(mocker): test_event = event() - test_event['Finding']['Id'] = "arn:aws:securityhub:us-east-1:111111111111:subscription/pci-dss/v/3.2.1//finding/fec91aaf-5016-4c40-9d24-9966e4be80c4" + test_event["Finding"][ + "Id" + ] = "arn:aws:securityhub:us-east-1:111111111111:subscription/pci-dss/v/3.2.1//finding/fec91aaf-5016-4c40-9d24-9966e4be80c4" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Finding Id is invalid: arn:aws:securityhub:us-east-1:111111111111:subscription/pci-dss/v/3.2.1//finding/fec91aaf-5016-4c40-9d24-9966e4be80c4 - missing Control Id' + assert ( + pytest_wrapped_e.value.code + == "ERROR: Finding Id is invalid: arn:aws:securityhub:us-east-1:111111111111:subscription/pci-dss/v/3.2.1//finding/fec91aaf-5016-4c40-9d24-9966e4be80c4 - missing Control Id" + ) + def test_control_id_nomatch(mocker): test_event = event() - test_event['Finding']['Id'] = "arn:aws:securityhub:us-east-2:111111111111:subscription/pci-dss/v/3.2.1/2.4/finding/fec91aaf-5016-4c40-9d24-9966e4be80c4" + test_event["Finding"][ + "Id" + ] = "arn:aws:securityhub:us-east-2:111111111111:subscription/pci-dss/v/3.2.1/2.4/finding/fec91aaf-5016-4c40-9d24-9966e4be80c4" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Control Id from input (2.4) does not match S3.12' + assert ( + pytest_wrapped_e.value.code + == "ERROR: 
Control Id from input (2.4) does not match S3.12" + ) + def test_bad_account_id(mocker): test_event = event() - test_event['Finding']['AwsAccountId'] = "1234123412345" + test_event["Finding"]["AwsAccountId"] = "1234123412345" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: AwsAccountId is invalid: 1234123412345' + assert ( + pytest_wrapped_e.value.code == "ERROR: AwsAccountId is invalid: 1234123412345" + ) + def test_bad_productarn(mocker): test_event = event() - test_event['Finding']['ProductArn'] = "badvalue" + test_event["Finding"]["ProductArn"] = "badvalue" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: ProductArn is invalid: badvalue' + assert pytest_wrapped_e.value.code == "ERROR: ProductArn is invalid: badvalue" + def test_bad_resource_match(mocker): test_event = event() - test_event['parse_id_pattern'] = '^arn:(?:aws|aws-cn|aws-us-gov):logs:::([A-Za-z0-9.-]{3,63})$' + test_event["parse_id_pattern"] = ( + "^arn:(?:aws|aws-cn|aws-us-gov):logs:::([A-Za-z0-9.-]{3,63})$" + ) with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Invalid resource Id arn:aws:s3:::asr-scv-reference' + assert ( + pytest_wrapped_e.value.code + == "ERROR: Invalid resource Id arn:aws:s3:::asr-scv-reference" + ) + def test_no_resource_pattern(mocker): test_event = event() expected_result = expected() - expected_result['finding'] = event().get('Finding') - test_event['parse_id_pattern'] = '' - expected_result['resource_id'] = 'arn:aws:s3:::asr-scv-reference' - expected_result['matches'] = [] - 
expected_result['object']['Id'] = expected_result['resource_id'] + expected_result["finding"] = event().get("Finding") + test_event["parse_id_pattern"] = "" + expected_result["resource_id"] = "arn:aws:s3:::asr-scv-reference" + expected_result["matches"] = [] + expected_result["object"]["Id"] = expected_result["resource_id"] parsed_event = parse_event(test_event, {}) assert parsed_event == expected_result + def test_no_resource_pattern_no_resource_id(mocker): test_event = event() - test_event['parse_id_pattern'] = '' - test_event['Finding']['Resources'][0]['Id'] = '' + test_event["parse_id_pattern"] = "" + test_event["Finding"]["Resources"][0]["Id"] = "" with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = parse_event(test_event, {}) + parse_event(test_event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'ERROR: Resource Id is missing from the finding json Resources (Id)' + assert ( + pytest_wrapped_e.value.code + == "ERROR: Resource Id is missing from the finding json Resources (Id)" + ) diff --git a/source/remediation_runbooks/BlockSSMDocumentPublicAccess.yaml b/source/remediation_runbooks/BlockSSMDocumentPublicAccess.yaml new file mode 100644 index 00000000..abe10460 --- /dev/null +++ b/source/remediation_runbooks/BlockSSMDocumentPublicAccess.yaml @@ -0,0 +1,48 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-BlockSSMDocumentPublicAccess + + ## What does this document do? + This document modifies SSM document permissions to prevent cross-account public access. + + ## Input Parameters + * DocumentArn: (Required) SSM Document name to be changed. + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ + ## Output Parameters + * BlockSSMDocumentPublicAccess.Output + +schemaVersion: "0.3" +assumeRole: "{{ AutomationAssumeRole }}" +parameters: + DocumentArn: + type: String + description: (Required) The document ARN. + allowedPattern: '^(arn:(?:aws|aws-cn|aws-us-gov):ssm:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:document\/[A-Za-z0-9][A-Za-z0-9\-_]{1,254})$' + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' +outputs: + - BlockSSMDocumentPublicAccess.Output +mainSteps: + - + name: BlockSSMDocumentPublicAccess + action: 'aws:executeScript' + description: | + ## Remediation + Removes public access to the SSM Document + timeoutSeconds: 600 + inputs: + InputPayload: + document_arn: '{{DocumentArn}}' + Runtime: python3.8 + Handler: lambda_handler + Script: |- + %%SCRIPT=block_ssm_doc_public_access.py%% + outputs: + - Name: Output + Selector: $.Payload.response + Type: StringMap diff --git a/source/remediation_runbooks/ConfigureSNSTopicForStack.yaml b/source/remediation_runbooks/ConfigureSNSTopicForStack.yaml index 1a992fcc..b4675884 100644 --- a/source/remediation_runbooks/ConfigureSNSTopicForStack.yaml +++ b/source/remediation_runbooks/ConfigureSNSTopicForStack.yaml @@ -13,7 +13,7 @@ description: | * StackArn: (Required) The ARN of the stack. 
## Security Standards / Controls - * AFSBP v1.0.0: CloudFormation.1 + * AWS FSBP v1.0.0: CloudFormation.1 assumeRole: '{{ AutomationAssumeRole }}' parameters: AutomationAssumeRole: diff --git a/source/remediation_runbooks/CreateCloudTrailMultiRegionTrail.yaml b/source/remediation_runbooks/CreateCloudTrailMultiRegionTrail.yaml index 386f6321..78fc7151 100644 --- a/source/remediation_runbooks/CreateCloudTrailMultiRegionTrail.yaml +++ b/source/remediation_runbooks/CreateCloudTrailMultiRegionTrail.yaml @@ -12,7 +12,7 @@ description: | * KMSKeyArn (from SSM): Arn of the KMS key to be used to encrypt data ## Security Standards / Controls - * AFSBP v1.0.0: CloudTrail.1 + * AWS FSBP v1.0.0: CloudTrail.1 * CIS v1.2.0: 2.1 * PCI: CloudTrail.2 diff --git a/source/remediation_runbooks/DisablePublicIPAutoAssign.yaml b/source/remediation_runbooks/DisablePublicIPAutoAssign.yaml index e9e2089d..4bcab598 100644 --- a/source/remediation_runbooks/DisablePublicIPAutoAssign.yaml +++ b/source/remediation_runbooks/DisablePublicIPAutoAssign.yaml @@ -14,7 +14,7 @@ description: | * SubnetARN: (Required) The ARN of the Amazon EC2 Subnet. ## Security Standards / Controls - * AFSBP v1.0.0: EC2.15 + * AWS FSBP v1.0.0: EC2.15 assumeRole: "{{ AutomationAssumeRole }}" parameters: diff --git a/source/remediation_runbooks/DisableTGWAutoAcceptSharedAttachments.yaml b/source/remediation_runbooks/DisableTGWAutoAcceptSharedAttachments.yaml new file mode 100644 index 00000000..e18d73e4 --- /dev/null +++ b/source/remediation_runbooks/DisableTGWAutoAcceptSharedAttachments.yaml @@ -0,0 +1,49 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +schemaVersion: "0.3" +description: | + ### Document name - ASR-DisableTGWAutoAcceptSharedAttachments + + ## What does this document do? + This document turns off AutoAcceptSharedAttachments on a transit gateway to ensure that only authorized VPC attachment requests are accepted. 
+ [ModifyTransitGateway](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_ModifyTransitGateway.html) API. + + + ## Input Parameters + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * TransitGatewayId: (Required) The Id of the transit gateway. + + ## Security Standards / Controls + * AFSBP v1.0.0: EC2.23 + + +assumeRole: "{{ AutomationAssumeRole }}" +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + TransitGatewayId: + type: String + description: (Required) The Id of the Transit Gateway. + allowedPattern: '^tgw-[a-z0-9\-]+$' + +outputs: + - DisableTGWAutoAcceptSharedAttachments.Output +mainSteps: +- name: 'DisableTGWAutoAcceptSharedAttachments' + action: 'aws:executeScript' + maxAttempts: 3 + timeoutSeconds: 600 + inputs: + InputPayload: + TransitGatewayId: '{{ TransitGatewayId }}' + Runtime: 'python3.8' + Handler: 'lambda_handler' + Script: |- + %%SCRIPT=DisableTGWAutoAcceptSharedAttachments.py%% + outputs: + - Name: 'Output' + Selector: '$.Payload' + Type: 'StringMap' diff --git a/source/remediation_runbooks/DisableUnrestrictedAccessToHighRiskPorts.yaml b/source/remediation_runbooks/DisableUnrestrictedAccessToHighRiskPorts.yaml new file mode 100644 index 00000000..9b3e02e3 --- /dev/null +++ b/source/remediation_runbooks/DisableUnrestrictedAccessToHighRiskPorts.yaml @@ -0,0 +1,50 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +schemaVersion: "0.3" +description: | + ### Document name - ASR-DisableUnrestrictedAccessToHighRiskPorts + + ## What does this document do? 
+ This document disables unrestricted access to high risk ports using + [DescribeSecurityGroupRules](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSecurityGroupRules.html) API, + [ModifySecurityGroupRules](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_ModifySecurityGroupRules.html) API. + + + ## Input Parameters + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * SecurityGroupId: (Required) The Id of the security group. + + ## Security Standards / Controls + * AFSBP v1.0.0: EC2.19 + + +assumeRole: "{{ AutomationAssumeRole }}" +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + SecurityGroupId: + type: String + description: (Required) The Id of the Seurity Group. + allowedPattern: '^sg-[a-z0-9\-]+$' + +outputs: + - DisableUnrestrictedAccessToHighRiskPorts.Output +mainSteps: +- name: 'DisableUnrestrictedAccessToHighRiskPorts' + action: 'aws:executeScript' + maxAttempts: 3 + timeoutSeconds: 600 + inputs: + InputPayload: + SecurityGroupId: '{{ SecurityGroupId }}' + Runtime: 'python3.8' + Handler: 'lambda_handler' + Script: |- + %%SCRIPT=DisableUnrestrictedAccessToHighRiskPorts.py%% + outputs: + - Name: 'Output' + Selector: '$.Payload' + Type: 'StringMap' diff --git a/source/remediation_runbooks/EnableAutoScalingGroupELBHealthCheck.yaml b/source/remediation_runbooks/EnableAutoScalingGroupELBHealthCheck.yaml index 9db079c4..dbb3b6ed 100644 --- a/source/remediation_runbooks/EnableAutoScalingGroupELBHealthCheck.yaml +++ b/source/remediation_runbooks/EnableAutoScalingGroupELBHealthCheck.yaml @@ -18,7 +18,7 @@ description: | * Remediation.Output - stdout messages from the remediation ## Security Standards / Controls - * AFSBP v1.0.0: Autoscaling.1 + * AWS FSBP v1.0.0: Autoscaling.1 * 
CIS v1.2.0: 2.1 * PCI: Autoscaling.1 diff --git a/source/remediation_runbooks/EnableAutoSecretRotation.yaml b/source/remediation_runbooks/EnableAutoSecretRotation.yaml new file mode 100644 index 00000000..01fbc5ed --- /dev/null +++ b/source/remediation_runbooks/EnableAutoSecretRotation.yaml @@ -0,0 +1,54 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +schemaVersion: "0.3" +description: | + ### Document name - ASR-EnableAutoSecretRotation + + ## What does this document do? + This document enables automatic rotation on a Secrets Manager secret if a Lambda function is already associated with it. + [RotateSecret](https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_RotateSecret.html) API. + + + ## Input Parameters + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * SecretARN: (Required) The ARN of the Secrets Manager secret. + * MaximumAllowedRotationFrequency: (Optional) The number of days that a secret must be automatically rotated within. + + ## Security Standards / Controls + * AFSBP v1.0.0: SecretsManager.1 + + +assumeRole: "{{ AutomationAssumeRole }}" +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + SecretARN: + type: String + description: (Required) The ARN of the Secrets Manager secret. + allowedPattern: '^arn:(?:aws|aws-cn|aws-us-gov):secretsmanager:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:secret:([A-Za-z0-9\/_+=.@-]+)$' + MaximumAllowedRotationFrequency: + type: Integer + description: (Optional) The number of days that a secret must be automatically rotated within. 
+ default: 90 +outputs: + - EnableAutoSecretRotation.Output +mainSteps: +- name: 'EnableAutoSecretRotation' + action: 'aws:executeScript' + maxAttempts: 3 + timeoutSeconds: 600 + inputs: + InputPayload: + SecretARN: '{{ SecretARN }}' + MaximumAllowedRotationFrequency: '{{ MaximumAllowedRotationFrequency }}' + Runtime: 'python3.8' + Handler: 'lambda_handler' + Script: |- + %%SCRIPT=EnableAutoSecretRotation.py%% + outputs: + - Name: 'Output' + Selector: '$.Payload' + Type: 'StringMap' diff --git a/source/remediation_runbooks/EnableBucketEventNotifications.yaml b/source/remediation_runbooks/EnableBucketEventNotifications.yaml new file mode 100644 index 00000000..fb59a308 --- /dev/null +++ b/source/remediation_runbooks/EnableBucketEventNotifications.yaml @@ -0,0 +1,73 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +schemaVersion: '0.3' +description: | + ### Document name - ASR-EnableBucketEventNotifications + + ## What does this document do? + This document creates an SNS topic if it does not already exist, then configures notifications on an S3 bucket that posts event notifications to that topic. + + ## Input Parameters + * AccountId: (Required) Account ID of the account for the finding + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * BucketName: (Required) Name of bucket that event notifications will be triggered on. + * TopicName: (Required) The name of the SNS topic to create and configure for notifications. + + ## Security Standards / Controls + * AWS FSBP v1.0.0: S3.11 +assumeRole: '{{ AutomationAssumeRole }}' +parameters: + AccountId: + type: String + description: Account ID of the account for the finding + allowedPattern: ^[0-9]{12}$ + AutomationAssumeRole: + type: 'String' + description: '(Required) The ARN of the role that allows Automation to perform the actions on your behalf.' 
+ allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + BucketName: + type: 'String' + description: '(Required) The name of the S3 Bucket.' + allowedPattern: (?=^.{3,63}$)(?!^(\d+\.)+\d+$)(^(([a-z0-9]|[a-z0-9][a-z0-9\-]*[a-z0-9])\.)*([a-z0-9]|[a-z0-9][a-z0-9\-]*[a-z0-9])$) + TopicName: + type: 'String' + description: '(Optional) The name of the SNS topic to create and configure for notifications.' + allowedPattern: '^[a-zA-Z0-9][a-zA-Z0-9-_]{0,255}$' + default: 'SO0111-ASR-S3BucketNotifications' + EventTypes: + type: 'StringList' + description: '(Optional) The event types to add notifications for.' + default: [ + "s3:ReducedRedundancyLostObject", + "s3:ObjectCreated:*", + "s3:ObjectRemoved:*", + "s3:ObjectRestore:*", + "s3:Replication:*", + "s3:LifecycleExpiration:*", + "s3:LifecycleTransition", + "s3:IntelligentTiering", + "s3:ObjectTagging:*", + "s3:ObjectAcl:Put", + ] +outputs: +- 'EnableBucketEventNotifications.Output' +mainSteps: +- name: 'EnableBucketEventNotifications' + action: 'aws:executeScript' + timeoutSeconds: 600 + inputs: + InputPayload: + bucket_name: '{{ BucketName }}' + topic_name: '{{ TopicName }}' + account_id: '{{ AccountId }}' + event_types: '{{ EventTypes }}' + Runtime: 'python3.8' + Handler: 'lambda_handler' + Script: |- + %%SCRIPT=enable_bucket_event_notifications.py%% + outputs: + - Name: 'Output' + Selector: '$.Payload.output' + Type: 'StringMap' + isEnd: true diff --git a/source/remediation_runbooks/EnableCloudFrontDefaultRootObject.yaml b/source/remediation_runbooks/EnableCloudFrontDefaultRootObject.yaml new file mode 100644 index 00000000..04bad723 --- /dev/null +++ b/source/remediation_runbooks/EnableCloudFrontDefaultRootObject.yaml @@ -0,0 +1,57 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +description: |- + ### Document name - AWSConfigRemediation-EnableCloudFrontDefaultRootObject + + ## What does this document do? 
+ This runbook configures the default root object for the Amazon CloudFront distribution you specify using the [UpdateDistribution](https://docs.aws.amazon.com/cloudfront/latest/APIReference/API_UpdateDistribution.html) API. + + ## Input Parameters + * CloudFrontDistribution: (Required) The ARN of the CloudFront distribution you want to configure the default root object for. + * DefaultRootObject: (Required) The object that you want CloudFront to return when a viewer request points to your root URL. + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + + ## Output Parameters + * UpdateDistributionAndVerify.Output: The standard HTTP response from the UpdateDistribution API. + +schemaVersion: "0.3" +assumeRole: "{{ AutomationAssumeRole }}" +parameters: + CloudFrontDistribution: + type: String + description: (Required) The ARN of the CloudFront distribution you want to configure the default root object for. + allowedPattern: ^(arn:(?:aws|aws-us-gov|aws-cn):cloudfront::\d{12}:distribution\/([A-Z0-9]+))$ + DefaultRootObject: + type: String + description: (Required) The object that you want CloudFront to return when a viewer request points to your root URL. + allowedPattern: ^[\w._-~]{1,255}$ + default: index.html + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' +outputs: + - UpdateDistributionAndVerify.Output +mainSteps: + - name: UpdateDistributionAndVerify + action: "aws:executeScript" + description: | + ## UpdateDistributionAndVerify + Configures the default root object for the CloudFront distribution you specify in the CloudFrontDistribution parameter and verifies its successful modification. + ## outputs + * Output: The standard HTTP response from the UpdateDistribution API.
+ isEnd: true + timeoutSeconds: 600 + inputs: + Runtime: python3.8 + Handler: handler + InputPayload: + cloudfront_distribution: "{{ CloudFrontDistribution }}" + root_object: "{{ DefaultRootObject }}" + Script: |- + %%SCRIPT=enable_cloudfront_default_root_object.py%% + outputs: + - Name: Output + Type: StringMap + Selector: $.Payload.Output \ No newline at end of file diff --git a/source/remediation_runbooks/EnableCloudTrailEncryption.yaml b/source/remediation_runbooks/EnableCloudTrailEncryption.yaml index 63303545..b82e200b 100644 --- a/source/remediation_runbooks/EnableCloudTrailEncryption.yaml +++ b/source/remediation_runbooks/EnableCloudTrailEncryption.yaml @@ -13,7 +13,7 @@ description: | * TrailArn: ARN of the CloudTrail to encrypt ## Security Standards / Controls - * AFSBP v1.0.0: CloudTrail.2 + * AWS FSBP v1.0.0: CloudTrail.2 * CIS v1.2.0: 2.7 * PCI: CloudTrail.1 diff --git a/source/remediation_runbooks/EnableCloudTrailToCloudWatchLogging.yaml b/source/remediation_runbooks/EnableCloudTrailToCloudWatchLogging.yaml index 8809fad0..70d5658f 100644 --- a/source/remediation_runbooks/EnableCloudTrailToCloudWatchLogging.yaml +++ b/source/remediation_runbooks/EnableCloudTrailToCloudWatchLogging.yaml @@ -11,9 +11,9 @@ description: | * KMSKeyArn (from SSM): Arn of the KMS key to be used to encrypt data ## Security Standards / Controls - * AFSBP v1.0.0: N/A - * CIS v1.2.0: 2.4 - * PCI: CloudTrail.4 + * AWS FSBP v1.0.0: N/A + * CIS v1.2.0: 2.4 + * PCI: CloudTrail.4 schemaVersion: "0.3" assumeRole: "{{ AutomationAssumeRole }}" diff --git a/source/remediation_runbooks/EnableDefaultEncryptionS3.yaml b/source/remediation_runbooks/EnableDefaultEncryptionS3.yaml index 5f380f58..a6c54d1b 100644 --- a/source/remediation_runbooks/EnableDefaultEncryptionS3.yaml +++ b/source/remediation_runbooks/EnableDefaultEncryptionS3.yaml @@ -17,9 +17,9 @@ description: | * Remediation.Output - stdout messages from the remediation ## Security Standards / Controls - * AFSBP v1.0.0: S3.4 - * CIS 
v1.2.0: n/a - * PCI: S3.4 + * AWS FSBP v1.0.0: S3.4 + * CIS v1.2.0: n/a + * PCI: S3.4 schemaVersion: "0.3" assumeRole: "{{ AutomationAssumeRole }}" diff --git a/source/remediation_runbooks/EnableDeliveryStatusLoggingForSNSTopic.yaml b/source/remediation_runbooks/EnableDeliveryStatusLoggingForSNSTopic.yaml index 3eb7f358..f3d52e1f 100644 --- a/source/remediation_runbooks/EnableDeliveryStatusLoggingForSNSTopic.yaml +++ b/source/remediation_runbooks/EnableDeliveryStatusLoggingForSNSTopic.yaml @@ -15,7 +15,7 @@ description: | * SNSTopicArn: (Required) The ARN of the Amazon SNS Topic. ## Security Standards / Controls - * AFSBP v1.0.0: SNS.2 + * AWS FSBP v1.0.0: SNS.2 assumeRole: "{{ AutomationAssumeRole }}" parameters: diff --git a/source/remediation_runbooks/EnableEncryptionForSNSTopic.yaml b/source/remediation_runbooks/EnableEncryptionForSNSTopic.yaml index 5335c8ce..2c1f58a4 100644 --- a/source/remediation_runbooks/EnableEncryptionForSNSTopic.yaml +++ b/source/remediation_runbooks/EnableEncryptionForSNSTopic.yaml @@ -18,7 +18,7 @@ description: | * KmsKeyArn: (Required) The ARN of AWS KMS Key. ## Security Standards / Controls - * AFSBP v1.0.0: SNS.1 + * AWS FSBP v1.0.0: SNS.1 assumeRole: "{{ AutomationAssumeRole }}" parameters: diff --git a/source/remediation_runbooks/EnableEncryptionForSQSQueue.yaml b/source/remediation_runbooks/EnableEncryptionForSQSQueue.yaml index 7922c58a..58f5ed5a 100644 --- a/source/remediation_runbooks/EnableEncryptionForSQSQueue.yaml +++ b/source/remediation_runbooks/EnableEncryptionForSQSQueue.yaml @@ -18,7 +18,7 @@ description: | * KmsKeyArn: (Required) The ARN of AWS KMS Key. 
 ## Security Standards / Controls - * AFSBP v1.0.0: SQS.1 + * AWS FSBP v1.0.0: SQS.1 assumeRole: '{{ AutomationAssumeRole }}' parameters: AutomationAssumeRole: diff --git a/source/remediation_runbooks/EnableGuardDuty.yaml b/source/remediation_runbooks/EnableGuardDuty.yaml new file mode 100644 index 00000000..c9c9c397 --- /dev/null +++ b/source/remediation_runbooks/EnableGuardDuty.yaml @@ -0,0 +1,40 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +schemaVersion: "0.3" +description: | + ### Document name - ASR-EnableGuardDuty + + ## What does this document do? + This document enables Amazon GuardDuty. + + ## Input Parameters + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + + ## Security Standards / Controls + * AWS FSBP v1.0.0: GuardDuty.1 + + +assumeRole: "{{ AutomationAssumeRole }}" +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + +outputs: + - EnableGuardDuty.Output +mainSteps: +- name: 'EnableGuardDuty' + action: 'aws:executeScript' + maxAttempts: 3 + timeoutSeconds: 600 + inputs: + Runtime: 'python3.8' + Handler: 'lambda_handler' + Script: |- + %%SCRIPT=EnableGuardDuty.py%% + outputs: + - Name: 'Output' + Selector: '$.Payload' + Type: 'StringMap' diff --git a/source/remediation_runbooks/EnableIMDSV2OnInstance.yaml b/source/remediation_runbooks/EnableIMDSV2OnInstance.yaml new file mode 100644 index 00000000..30b4d7d0 --- /dev/null +++ b/source/remediation_runbooks/EnableIMDSV2OnInstance.yaml @@ -0,0 +1,47 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0 +--- +schemaVersion: "0.3" +description: | + ### Document name - ASR-EnableIMDSV2OnInstance + + ## What does this document do? + This document enables IMDS V2 by using the + [ModifyInstanceMetadataOptions](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_ModifyInstanceMetadataOptions.html) API. + + ## Input Parameters + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * InstanceARN: (Required) The ARN of the Amazon EC2 Instance. + + ## Security Standards / Controls + * AWS FSBP v1.0.0: EC2.8 + * NIST 800-53 Rev5: EC2.8 + +assumeRole: "{{ AutomationAssumeRole }}" +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + InstanceARN: + type: String + description: (Required) The ARN of the Amazon EC2 Instance. + allowedPattern: '^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:instance\/(i-[0-9a-f]*)$' + +outputs: + - EnableIMDSV2OnInstance.Output +mainSteps: +- name: 'EnableIMDSV2OnInstance' + action: 'aws:executeScript' + timeoutSeconds: 600 + inputs: + InputPayload: + instance_arn: '{{ InstanceARN }}' + Runtime: 'python3.8' + Handler: 'lambda_handler' + Script: |- + %%SCRIPT=enable_imds_v2_on_instance.py%% + outputs: + - Name: 'Output' + Selector: '$.Payload' + Type: 'StringMap' diff --git a/source/remediation_runbooks/EnablePrivateRepositoryScanning.yaml b/source/remediation_runbooks/EnablePrivateRepositoryScanning.yaml new file mode 100644 index 00000000..a4ada2dd --- /dev/null +++ b/source/remediation_runbooks/EnablePrivateRepositoryScanning.yaml @@ -0,0 +1,49 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +--- +schemaVersion: "0.3" +description: | + ### Document name - ASR-EnablePrivateRepositoryScanning + + ## What does this document do? + This document enables image scanning configuration on a private ECR repository. + [PutImageScanningConfiguration](https://docs.aws.amazon.com/AmazonECR/latest/APIReference/API_PutImageScanningConfiguration.html) API. + + + ## Input Parameters + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * RepositoryName: (Required) The name of the ECR private repository. + + ## Security Standards / Controls + * AWS FSBP v1.0.0: ECR.1 + + +assumeRole: "{{ AutomationAssumeRole }}" +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + RepositoryName: + type: String + description: (Required) The name of the ECR private repository.
+ allowedPattern: '^([a-z0-9._\/\-]+)$' + +outputs: + - EnablePrivateRepositoryScanning.Output +mainSteps: +- name: 'EnablePrivateRepositoryScanning' + action: 'aws:executeScript' + maxAttempts: 3 + timeoutSeconds: 600 + inputs: + InputPayload: + RepositoryName: '{{ RepositoryName }}' + Runtime: 'python3.8' + Handler: 'lambda_handler' + Script: |- + %%SCRIPT=EnablePrivateRepositoryScanning.py%% + outputs: + - Name: 'Output' + Selector: '$.Payload' + Type: 'StringMap' diff --git a/source/remediation_runbooks/EnableVPCFlowLogs.yaml b/source/remediation_runbooks/EnableVPCFlowLogs.yaml index 1a6f36fe..59b47ddb 100644 --- a/source/remediation_runbooks/EnableVPCFlowLogs.yaml +++ b/source/remediation_runbooks/EnableVPCFlowLogs.yaml @@ -13,7 +13,7 @@ description: | * KMSKeyArn: Amazon Resource Name (ARN) of the KMS Customer-Managed Key to use to encrypt the log group ## Security Standards / Controls - * AFSBP v1.0.0: CloudTrail.2 + * AWS FSBP v1.0.0: CloudTrail.2 * CIS v1.2.0: 2.7 * PCI: CloudTrail.1 diff --git a/source/remediation_runbooks/EncryptRDSSnapshot.yaml b/source/remediation_runbooks/EncryptRDSSnapshot.yaml index 8a72252e..8c6a5563 100644 --- a/source/remediation_runbooks/EncryptRDSSnapshot.yaml +++ b/source/remediation_runbooks/EncryptRDSSnapshot.yaml @@ -36,7 +36,7 @@ parameters: SourceDBSnapshotIdentifier: type: 'String' description: '(Required) The name of the unencrypted RDS snapshot or cluster snapshot to copy.' - allowedPattern: '^(?:rds:)?(?!.*--.*)(?!.*-$)[a-zA-Z][a-zA-Z0-9-]{0,254}$' + allowedPattern: '^(?:rds:|awsbackup:)?(?!.*--.*)(?!.*-$)[a-zA-Z][a-zA-Z0-9-]{0,254}$' TargetDBSnapshotIdentifier: type: 'String' description: '(Required) The name of the encrypted RDS snapshot or cluster snapshot to create.'
diff --git a/source/remediation_runbooks/MakeEBSSnapshotsPrivate.yaml b/source/remediation_runbooks/MakeEBSSnapshotsPrivate.yaml index 613f0707..1af4081b 100644 --- a/source/remediation_runbooks/MakeEBSSnapshotsPrivate.yaml +++ b/source/remediation_runbooks/MakeEBSSnapshotsPrivate.yaml @@ -16,7 +16,7 @@ description: | * Remediation.Output - stdout messages from the remediation ## Security Standards / Controls - * AFSBP v1.0.0: EC2.1 + * AWS FSBP v1.0.0: EC2.1 * CIS v1.2.0: n/a * PCI: EC2.1 diff --git a/source/remediation_runbooks/MakeRDSSnapshotPrivate.yaml b/source/remediation_runbooks/MakeRDSSnapshotPrivate.yaml index 9d1079a1..cabac988 100644 --- a/source/remediation_runbooks/MakeRDSSnapshotPrivate.yaml +++ b/source/remediation_runbooks/MakeRDSSnapshotPrivate.yaml @@ -18,7 +18,7 @@ description: | * Remediation.Output - stdout messages from the remediation ## Security Standards / Controls - * AFSBP v1.0.0: RDS.1 + * AWS FSBP v1.0.0: RDS.1 * CIS v1.2.0: n/a * PCI: RDS.1 diff --git a/source/remediation_runbooks/RemoveCodeBuildPrivilegedMode.yaml b/source/remediation_runbooks/RemoveCodeBuildPrivilegedMode.yaml new file mode 100644 index 00000000..22c27eae --- /dev/null +++ b/source/remediation_runbooks/RemoveCodeBuildPrivilegedMode.yaml @@ -0,0 +1,49 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +description: | + ### Document Name - ASR-RemoveCodeBuildPrivilegedMode + + ## What does this document do? + This document removes CodeBuild project privileged mode to remove a build project's Docker container access to all devices. + + ## Input Parameters + * ProjectName: (Required) Name of the CodeBuild project (not the ARN). + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ + ## Security Standards / Controls + * AWS FSBP v1.0.0: CodeBuild.5 + * NIST 800-53 Rev5: CodeBuild.5 + + ## Output Parameters + * RemoveCodeBuildPrivilegedMode.Output + +schemaVersion: "0.3" +assumeRole: "{{ AutomationAssumeRole }}" + +parameters: + ProjectName: + type: String + description: (Required) The project name (not the ARN). + allowedPattern: ^[A-Za-z0-9][A-Za-z0-9\-_]{1,254}$ + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' +outputs: + - RemoveCodeBuildPrivilegedMode.Output +mainSteps: + - name: RemoveCodeBuildPrivilegedMode + action: 'aws:executeScript' + timeoutSeconds: 600 + inputs: + InputPayload: + project_name: '{{ProjectName}}' + Runtime: python3.8 + Handler: lambda_handler + Script: |- + %%SCRIPT=remove_codebuild_privileged_mode.py%% + outputs: + - Name: Output + Selector: $.Payload + Type: StringMap diff --git a/source/remediation_runbooks/RemoveLambdaPublicAccess.yaml b/source/remediation_runbooks/RemoveLambdaPublicAccess.yaml index 69a296a0..f65f8d13 100644 --- a/source/remediation_runbooks/RemoveLambdaPublicAccess.yaml +++ b/source/remediation_runbooks/RemoveLambdaPublicAccess.yaml @@ -19,9 +19,9 @@ description: | * RemoveLambdaPublicAccess.Output - stdout messages from the remediation ## Security Standards / Controls - * AFSBP v1.0.0: Lambda.1 - * CIS v1.2.0: n/a - * PCI: Lambda.1 + * AWS FSBP v1.0.0: Lambda.1 + * CIS v1.2.0: n/a + * PCI: Lambda.1 assumeRole: "{{ AutomationAssumeRole }}" parameters: diff --git a/source/remediation_runbooks/RemoveUnusedSecret.yaml b/source/remediation_runbooks/RemoveUnusedSecret.yaml new file mode 100644 index 00000000..22ed11e9 --- /dev/null +++ b/source/remediation_runbooks/RemoveUnusedSecret.yaml @@ -0,0 +1,56 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +--- +schemaVersion: "0.3" +description: | + ### Document name - ASR-RemoveUnusedSecret + + ## What does this document do? + This document deletes a secret that has been unused for the number of days specified in the UnusedForDays parameter (Default: 90 days). + There is a 30 day period to recover the secret after it is deleted. + [DeleteSecret](https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_DeleteSecret.html) API. + + + ## Input Parameters + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * SecretARN: (Required) The ARN of the Secrets Manager secret. + * UnusedForDays: (Optional) Maximum number of days that a secret can remain unused. + + ## Security Standards / Controls + * AWS FSBP v1.0.0: SecretsManager.3 + + +assumeRole: "{{ AutomationAssumeRole }}" +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + SecretARN: + type: String + description: (Required) The ARN of the Secrets Manager secret. + allowedPattern: '^arn:(?:aws|aws-cn|aws-us-gov):secretsmanager:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:secret:([A-Za-z0-9\/_+=.@-]+)$' + UnusedForDays: + type: Integer + description: (Optional) Maximum number of days that a secret can remain unused.
+ allowedPattern: ^\d{0,3}$ + default: 90 +outputs: + - RemoveUnusedSecret.Output +mainSteps: +- name: 'RemoveUnusedSecret' + action: 'aws:executeScript' + maxAttempts: 3 + timeoutSeconds: 600 + inputs: + InputPayload: + SecretARN: '{{ SecretARN }}' + UnusedForDays: '{{ UnusedForDays }}' + Runtime: 'python3.8' + Handler: 'lambda_handler' + Script: |- + %%SCRIPT=RemoveUnusedSecret.py%% + outputs: + - Name: 'Output' + Selector: '$.Payload' + Type: 'StringMap' diff --git a/source/remediation_runbooks/RevokeUnauthorizedInboundRules.yaml b/source/remediation_runbooks/RevokeUnauthorizedInboundRules.yaml new file mode 100644 index 00000000..1ec205fd --- /dev/null +++ b/source/remediation_runbooks/RevokeUnauthorizedInboundRules.yaml @@ -0,0 +1,60 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +schemaVersion: "0.3" +description: | + ### Document name - ASR-RevokeUnauthorizedInboundRules + + ## What does this document do? + This document revokes inbound security group rules that allow unrestricted access to ports that are not authorized. + Authorized ports are listed in the AuthorizedTcpPorts and AuthorizedUdpPorts parameters. + + ## Input Parameters + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * SecurityGroupId: (Required) The ID of the Security Group. + * AuthorizedTcpPorts: (Optional) List of TCP ports authorized to be open to 0.0.0.0/0 or ::/0. + * AuthorizedUdpPorts: (Optional) List of UDP ports authorized to be open to 0.0.0.0/0 or ::/0. + + ## Security Standards / Controls + * AWS FSBP v1.0.0: EC2.18 + + +assumeRole: "{{ AutomationAssumeRole }}" +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf.
+ allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + SecurityGroupId: + type: String + description: (Required) The ID of the Security Group. + allowedPattern: '^sg-[a-z0-9\-]+$' + AuthorizedTcpPorts: + type: StringList + description: (Optional) List of TCP ports authorized to be open to 0.0.0.0/0 or ::/0. + default: ["80", "443"] + AuthorizedUdpPorts: + type: StringList + description: (Optional) List of UDP ports authorized to be open to 0.0.0.0/0 or ::/0. + default: [] + +outputs: + - RevokeUnauthorizedInboundRules.Output +mainSteps: +- name: 'RevokeUnauthorizedInboundRules' + action: 'aws:executeScript' + maxAttempts: 3 + timeoutSeconds: 600 + inputs: + InputPayload: + SecurityGroupId: '{{ SecurityGroupId }}' + AuthorizedTcpPorts: '{{ AuthorizedTcpPorts }}' + AuthorizedUdpPorts: '{{ AuthorizedUdpPorts }}' + Runtime: 'python3.8' + Handler: 'lambda_handler' + Script: |- + %%SCRIPT=RevokeUnauthorizedInboundRules.py%% + outputs: + - Name: 'Output' + Selector: '$.Payload' + Type: 'StringMap' diff --git a/source/remediation_runbooks/SetCloudFrontOriginDomain.yaml b/source/remediation_runbooks/SetCloudFrontOriginDomain.yaml new file mode 100644 index 00000000..9918ea37 --- /dev/null +++ b/source/remediation_runbooks/SetCloudFrontOriginDomain.yaml @@ -0,0 +1,45 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +schemaVersion: "0.3" +description: | + ### Document name - ASR-SetCloudFrontOriginDomain + + ## What does this document do? + This document updates the origin domain on a given CloudFront distribution to prevent a malicious third party from creating the referenced bucket and serving their own content through your distribution. + + ## Input Parameters + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * DistributionId: (Required) ID of the CloudFront Distribution to be updated.
+ + ## Security Standards / Controls + * NIST 800-53 Rev5: CloudFront.12 + +assumeRole: "{{ AutomationAssumeRole }}" +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + DistributionId: + type: String + description: (Required) The Distribution ID of the CloudFront distribution. + allowedPattern: '^[A-Za-z0-9]+$' + +outputs: + - SetCloudFrontOriginDomain.Output +mainSteps: +- name: 'SetCloudFrontOriginDomain' + action: 'aws:executeScript' + timeoutSeconds: 600 + inputs: + InputPayload: + Id: '{{ DistributionId }}' + Runtime: python3.8 + Handler: 'lambda_handler' + Script: |- + %%SCRIPT=SetCloudFrontOriginDomain.py%% + outputs: + - Name: 'Output' + Selector: '$.Payload' + Type: 'StringMap' \ No newline at end of file diff --git a/source/remediation_runbooks/SetS3LifecyclePolicy.yaml b/source/remediation_runbooks/SetS3LifecyclePolicy.yaml new file mode 100644 index 00000000..89cebd4e --- /dev/null +++ b/source/remediation_runbooks/SetS3LifecyclePolicy.yaml @@ -0,0 +1,65 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +schemaVersion: "0.3" +description: | + ### Document name - ASR-SetS3LifecyclePolicy + + ## What does this document do? + This document sets an example lifecycle policy that transfers objects greater than 10 GB to S3 Intelligent Tiering after 30 days (the TargetTransitionDays default). + It is recommended to set lifecycle policies appropriate for the objects stored in your S3 bucket. + [PutBucketLifecycleConfiguration](https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutBucketLifecycleConfiguration.html) API. + + + ## Input Parameters + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * BucketName: (Required) The name of the S3 bucket.
+ + ## Security Standards / Controls + * AWS FSBP v1.0.0: S3.13 + + +assumeRole: "{{ AutomationAssumeRole }}" +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + BucketName: + type: String + description: (Required) The name of the S3 bucket. + allowedPattern: '(?=^.{3,63}$)(?!^(\d+\.)+\d+$)(^(([a-z0-9]|[a-z0-9][a-z0-9\-]*[a-z0-9])\.)*([a-z0-9]|[a-z0-9][a-z0-9\-]*[a-z0-9])$)' + TargetTransitionDays: + type: Integer + description: (Optional) The number of days until transition used for the lifecycle policy. + default: 30 + TargetExpirationDays: + type: Integer + description: (Optional) The number of days until expiration used for the lifecycle policy. + default: 0 + TargetTransitionStorageClass: + type: String + description: (Optional) The name of the storage class that will be used for the lifecycle policy.
+ default: "INTELLIGENT_TIERING" + allowedPattern: '.*' +outputs: + - SetS3LifecyclePolicy.Output +mainSteps: +- name: 'SetS3LifecyclePolicy' + action: 'aws:executeScript' + maxAttempts: 3 + timeoutSeconds: 600 + inputs: + InputPayload: + BucketName: '{{ BucketName }}' + TargetTransitionDays: '{{ TargetTransitionDays }}' + TargetExpirationDays: '{{ TargetExpirationDays }}' + TargetTransitionStorageClass: '{{ TargetTransitionStorageClass }}' + Runtime: 'python3.8' + Handler: 'lambda_handler' + Script: |- + %%SCRIPT=SetS3LifecyclePolicy.py%% + outputs: + - Name: 'Output' + Selector: '$.Payload' + Type: 'StringMap' diff --git a/source/remediation_runbooks/SetSSLBucketPolicy.yaml b/source/remediation_runbooks/SetSSLBucketPolicy.yaml index 0490a77e..dec6d6b1 100644 --- a/source/remediation_runbooks/SetSSLBucketPolicy.yaml +++ b/source/remediation_runbooks/SetSSLBucketPolicy.yaml @@ -18,9 +18,9 @@ description: | * Remediation.Output - stdout messages from the remediation ## Security Standards / Controls - * AFSBP v1.0.0: S3.5 - * CIS v1.2.0: n/a - * PCI: S3.5 + * AWS FSBP v1.0.0: S3.5 + * CIS v1.2.0: n/a + * PCI: S3.5 assumeRole: "{{ AutomationAssumeRole }}" parameters: diff --git a/source/remediation_runbooks/UpdateSecretRotationPeriod.yaml b/source/remediation_runbooks/UpdateSecretRotationPeriod.yaml new file mode 100644 index 00000000..4ed4c310 --- /dev/null +++ b/source/remediation_runbooks/UpdateSecretRotationPeriod.yaml @@ -0,0 +1,54 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +--- +schemaVersion: "0.3" +description: | + ### Document name - ASR-UpdateSecretRotationPeriod + + ## What does this document do? + This document rotates a secret and sets its rotation period to 90 days. + [RotateSecret](https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_RotateSecret.html) API. 
+ + + ## Input Parameters + * AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + * SecretARN: (Required) The ARN of the Secrets Manager secret. + + ## Security Standards / Controls + * AWS FSBP v1.0.0: SecretsManager.4 + + +assumeRole: "{{ AutomationAssumeRole }}" +parameters: + AutomationAssumeRole: + type: String + description: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + allowedPattern: '^arn:(?:aws|aws-us-gov|aws-cn):iam::\d{12}:role/[\w+=,.@-]+$' + SecretARN: + type: String + description: (Required) The ARN of the Secrets Manager secret. + allowedPattern: '^arn:(?:aws|aws-cn|aws-us-gov):secretsmanager:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:secret:([A-Za-z0-9\/_+=.@-]+)$' + MaxDaysSinceRotation: + type: Integer + description: (Optional) The number of days set for the secret's rotation period. + allowedPattern: ^\d{0,3}$ + default: 90 +outputs: + - UpdateSecretRotationPeriod.Output +mainSteps: +- name: 'UpdateSecretRotationPeriod' + action: 'aws:executeScript' + maxAttempts: 3 + timeoutSeconds: 600 + inputs: + InputPayload: + SecretARN: '{{ SecretARN }}' + MaxDaysSinceRotation: '{{ MaxDaysSinceRotation }}' + Runtime: 'python3.8' + Handler: 'lambda_handler' + Script: |- + %%SCRIPT=UpdateSecretRotationPeriod.py%% + outputs: + - Name: 'Output' + Selector: '$.Payload' + Type: 'StringMap' diff --git a/source/remediation_runbooks/scripts/CreateAccessLoggingBucket_createloggingbucket.py b/source/remediation_runbooks/scripts/CreateAccessLoggingBucket_createloggingbucket.py index 69fbcd17..a0d2a6d7 100644 --- a/source/remediation_runbooks/scripts/CreateAccessLoggingBucket_createloggingbucket.py +++ b/source/remediation_runbooks/scripts/CreateAccessLoggingBucket_createloggingbucket.py @@ -1,28 +1,47 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0 +from typing import TYPE_CHECKING, TypedDict, cast + import boto3 -from botocore.exceptions import ClientError from botocore.config import Config -from typing import TYPE_CHECKING, Dict +from botocore.exceptions import ClientError if TYPE_CHECKING: - from mypy_boto3_s3 import S3Client from aws_lambda_powertools.utilities.typing import LambdaContext + from mypy_boto3_s3.client import S3Client + from mypy_boto3_s3.literals import BucketLocationConstraintType + from mypy_boto3_s3.type_defs import CreateBucketRequestRequestTypeDef else: S3Client = object LambdaContext = object + BucketLocationConstraintType = object + CreateBucketRequestRequestTypeDef = object def connect_to_s3(boto_config: Config) -> S3Client: - return boto3.client("s3", config=boto_config) + s3: S3Client = boto3.client("s3", config=boto_config) + return s3 + + +class Event(TypedDict): + BucketName: str + AWS_REGION: str + + +class Output(TypedDict): + Message: str + + +class Response(TypedDict): + output: Output -def create_logging_bucket(event: Dict, _: LambdaContext) -> Dict: +def create_logging_bucket(event: Event, _: LambdaContext) -> Response: boto_config = Config(retries={"mode": "standard"}) s3 = connect_to_s3(boto_config) try: - kwargs = { + kwargs: CreateBucketRequestRequestTypeDef = { "Bucket": event["BucketName"], "GrantWrite": "uri=http://acs.amazonaws.com/groups/s3/LogDelivery", "GrantReadACP": "uri=http://acs.amazonaws.com/groups/s3/LogDelivery", @@ -30,7 +49,9 @@ def create_logging_bucket(event: Dict, _: LambdaContext) -> Dict: } if event["AWS_REGION"] != "us-east-1": kwargs["CreateBucketConfiguration"] = { - "LocationConstraint": event["AWS_REGION"] + "LocationConstraint": cast( + BucketLocationConstraintType, event["AWS_REGION"] + ) } s3.create_bucket(**kwargs) diff --git a/source/remediation_runbooks/scripts/CreateCloudTrailMultiRegionTrail_createcloudtrailbucketpolicy.py 
b/source/remediation_runbooks/scripts/CreateCloudTrailMultiRegionTrail_createcloudtrailbucketpolicy.py index 76151b7b..5242485a 100644 --- a/source/remediation_runbooks/scripts/CreateCloudTrailMultiRegionTrail_createcloudtrailbucketpolicy.py +++ b/source/remediation_runbooks/scripts/CreateCloudTrailMultiRegionTrail_createcloudtrailbucketpolicy.py @@ -1,25 +1,22 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 import json + import boto3 from botocore.config import Config -from botocore.exceptions import ClientError + def connect_to_s3(boto_config): - return boto3.client('s3', config=boto_config) + return boto3.client("s3", config=boto_config) -def create_bucket_policy(event, _): - boto_config = Config( - retries ={ - 'mode': 'standard' - } - ) +def create_bucket_policy(event, _): + boto_config = Config(retries={"mode": "standard"}) s3 = connect_to_s3(boto_config) - cloudtrail_bucket = event['cloudtrail_bucket'] - aws_partition = event['partition'] - aws_account = event['account'] + cloudtrail_bucket = event["cloudtrail_bucket"] + aws_partition = event["partition"] + aws_account = event["account"] try: bucket_policy = { "Version": "2012-10-17", @@ -27,53 +24,43 @@ def create_bucket_policy(event, _): { "Sid": "AWSCloudTrailAclCheck20150319", "Effect": "Allow", - "Principal": { - "Service": [ - "cloudtrail.amazonaws.com" - ] - }, + "Principal": {"Service": ["cloudtrail.amazonaws.com"]}, "Action": "s3:GetBucketAcl", - "Resource": "arn:" + aws_partition + ":s3:::" + cloudtrail_bucket + "Resource": "arn:" + aws_partition + ":s3:::" + cloudtrail_bucket, }, { "Sid": "AWSCloudTrailWrite20150319", "Effect": "Allow", - "Principal": { - "Service": [ - "cloudtrail.amazonaws.com" - ] - }, + "Principal": {"Service": ["cloudtrail.amazonaws.com"]}, "Action": "s3:PutObject", - "Resource": "arn:" + aws_partition + ":s3:::" + cloudtrail_bucket + "/AWSLogs/" + aws_account + "/*", + "Resource": "arn:" + + aws_partition + + 
":s3:::" + + cloudtrail_bucket + + "/AWSLogs/" + + aws_account + + "/*", "Condition": { - "StringEquals": { - "s3:x-amz-acl": "bucket-owner-full-control" - }, - } + "StringEquals": {"s3:x-amz-acl": "bucket-owner-full-control"}, + }, }, { "Sid": "AllowSSLRequestsOnly", "Effect": "Deny", "Principal": "*", "Action": "s3:*", - "Resource": ["arn:" + aws_partition + ":s3:::" + cloudtrail_bucket ,"arn:" + aws_partition + ":s3:::" + cloudtrail_bucket + "/*"], - "Condition": { - "Bool": { - "aws:SecureTransport": "false" - } - } - } - ] + "Resource": [ + "arn:" + aws_partition + ":s3:::" + cloudtrail_bucket, + "arn:" + aws_partition + ":s3:::" + cloudtrail_bucket + "/*", + ], + "Condition": {"Bool": {"aws:SecureTransport": "false"}}, + }, + ], } - s3.put_bucket_policy( - Bucket=cloudtrail_bucket, - Policy=json.dumps(bucket_policy) - ) + s3.put_bucket_policy(Bucket=cloudtrail_bucket, Policy=json.dumps(bucket_policy)) return { - "output": { - "Message": f'Set bucket policy for bucket {cloudtrail_bucket}' - } + "output": {"Message": f"Set bucket policy for bucket {cloudtrail_bucket}"} } except Exception as e: print(e) - exit('PutBucketPolicy failed: ' + str(e)) + exit("PutBucketPolicy failed: " + str(e)) diff --git a/source/remediation_runbooks/scripts/CreateCloudTrailMultiRegionTrail_createloggingbucket.py b/source/remediation_runbooks/scripts/CreateCloudTrailMultiRegionTrail_createloggingbucket.py index c2641aed..d10a3d79 100644 --- a/source/remediation_runbooks/scripts/CreateCloudTrailMultiRegionTrail_createloggingbucket.py +++ b/source/remediation_runbooks/scripts/CreateCloudTrailMultiRegionTrail_createloggingbucket.py @@ -1,23 +1,37 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 +from typing import TYPE_CHECKING, Dict, Literal, TypedDict, cast + import boto3 from botocore.config import Config from botocore.exceptions import ClientError -from typing import TYPE_CHECKING, Dict if TYPE_CHECKING: - from mypy_boto3_s3 import S3Client from aws_lambda_powertools.utilities.typing import LambdaContext + from mypy_boto3_s3.client import S3Client + from mypy_boto3_s3.literals import BucketLocationConstraintType + from mypy_boto3_s3.type_defs import CreateBucketRequestRequestTypeDef else: S3Client = object LambdaContext = object + BucketLocationConstraintType = object + CreateBucketRequestRequestTypeDef = object def connect_to_s3() -> S3Client: - return boto3.client("s3", config=Config(retries={"mode": "standard"})) + s3: S3Client = boto3.client("s3", config=Config(retries={"mode": "standard"})) + return s3 + + +class Event(TypedDict): + account: str + region: str + kms_key_arn: str -def create_logging_bucket(event: Dict, _: LambdaContext) -> Dict: +def create_logging_bucket( + event: Event, _: LambdaContext +) -> Dict[Literal["logging_bucket"], str]: s3 = connect_to_s3() kms_key_arn: str = event["kms_key_arn"] @@ -36,13 +50,15 @@ def create_logging_bucket(event: Dict, _: LambdaContext) -> Dict: def create_bucket(s3: S3Client, bucket_name: str, aws_region: str) -> str: try: - kwargs = { + kwargs: CreateBucketRequestRequestTypeDef = { "Bucket": bucket_name, "ACL": "private", "ObjectOwnership": "ObjectWriter", } if aws_region != "us-east-1": - kwargs["CreateBucketConfiguration"] = {"LocationConstraint": aws_region} + kwargs["CreateBucketConfiguration"] = { + "LocationConstraint": cast(BucketLocationConstraintType, aws_region) + } s3.create_bucket(**kwargs) return "success" diff --git a/source/remediation_runbooks/scripts/CreateCloudTrailMultiRegionTrail_enablecloudtrail.py b/source/remediation_runbooks/scripts/CreateCloudTrailMultiRegionTrail_enablecloudtrail.py index 59143a7f..051ffb01 100644 --- 
a/source/remediation_runbooks/scripts/CreateCloudTrailMultiRegionTrail_enablecloudtrail.py +++ b/source/remediation_runbooks/scripts/CreateCloudTrailMultiRegionTrail_enablecloudtrail.py @@ -2,36 +2,28 @@ # SPDX-License-Identifier: Apache-2.0 import boto3 from botocore.config import Config -from botocore.exceptions import ClientError + def connect_to_cloudtrail(boto_config): - return boto3.client('cloudtrail', config=boto_config) + return boto3.client("cloudtrail", config=boto_config) -def enable_cloudtrail(event, _): - boto_config = Config( - retries ={ - 'mode': 'standard' - } - ) +def enable_cloudtrail(event, _): + boto_config = Config(retries={"mode": "standard"}) ct = connect_to_cloudtrail(boto_config) try: ct.create_trail( - Name='multi-region-cloud-trail', - S3BucketName=event['cloudtrail_bucket'], + Name="multi-region-cloud-trail", + S3BucketName=event["cloudtrail_bucket"], IncludeGlobalServiceEvents=True, EnableLogFileValidation=True, IsMultiRegionTrail=True, - KmsKeyId=event['kms_key_arn'] - ) - ct.start_logging( - Name='multi-region-cloud-trail' + KmsKeyId=event["kms_key_arn"], ) + ct.start_logging(Name="multi-region-cloud-trail") return { - "output": { - "Message": f'CloudTrail Trail multi-region-cloud-trail created' - } + "output": {"Message": "CloudTrail Trail multi-region-cloud-trail created"} } except Exception as e: - exit('Error enabling AWS Config: ' + str(e)) + exit("Error enabling AWS Config: " + str(e)) diff --git a/source/remediation_runbooks/scripts/CreateCloudTrailMultiRegionTrail_process_results.py b/source/remediation_runbooks/scripts/CreateCloudTrailMultiRegionTrail_process_results.py index a51bfd69..c8ae0fc5 100644 --- a/source/remediation_runbooks/scripts/CreateCloudTrailMultiRegionTrail_process_results.py +++ b/source/remediation_runbooks/scripts/CreateCloudTrailMultiRegionTrail_process_results.py @@ -1,12 +1,14 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 def process_results(event, _): - print(f'Created encrypted CloudTrail bucket {event["cloudtrail_bucket"]}') - print(f'Created access logging for CloudTrail bucket in bucket {event["logging_bucket"]}') - print('Enabled multi-region AWS CloudTrail') - return { - "response": { - "message": "AWS CloudTrail successfully enabled", - "status": "Success" + print(f'Created encrypted CloudTrail bucket {event["cloudtrail_bucket"]}') + print( + f'Created access logging for CloudTrail bucket in bucket {event["logging_bucket"]}' + ) + print("Enabled multi-region AWS CloudTrail") + return { + "response": { + "message": "AWS CloudTrail successfully enabled", + "status": "Success", + } } - } diff --git a/source/remediation_runbooks/scripts/CreateIAMSupportRole.py b/source/remediation_runbooks/scripts/CreateIAMSupportRole.py index 32c7c70e..b050874b 100644 --- a/source/remediation_runbooks/scripts/CreateIAMSupportRole.py +++ b/source/remediation_runbooks/scripts/CreateIAMSupportRole.py @@ -1,12 +1,20 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import json -from botocore.config import Config +from typing import Dict, Final, List, Literal, TypedDict + import boto3 +from botocore.config import Config BOTO_CONFIG = Config(retries={"mode": "standard"}) -responses = {} + +class Response(TypedDict): + Account: str + RoleName: Literal["aws_incident_support_role"] + + +responses: Dict[Literal["CreateIAMRoleResponse"], List[Response]] = {} responses["CreateIAMRoleResponse"] = [] @@ -15,11 +23,15 @@ def connect_to_iam(boto_config): def get_account(boto_config): - return boto3.client('sts', config=boto_config).get_caller_identity()['Account'] + return boto3.client("sts", config=boto_config).get_caller_identity()["Account"] def get_partition(boto_config): - return boto3.client('sts', config=boto_config).get_caller_identity()['Arn'].split(':')[1] + return ( + boto3.client("sts", config=boto_config) + .get_caller_identity()["Arn"] + .split(":")[1] + ) def create_iam_role(_, __): @@ -37,7 +49,7 @@ def create_iam_role(_, __): ], } - role_name = "aws_incident_support_role" + role_name: Final = "aws_incident_support_role" iam = connect_to_iam(BOTO_CONFIG) if not does_role_exist(iam, role_name): iam.create_role( diff --git a/source/remediation_runbooks/scripts/CreateLogMetricFilterAndAlarm.py b/source/remediation_runbooks/scripts/CreateLogMetricFilterAndAlarm.py index f86b7ff1..6a289292 100644 --- a/source/remediation_runbooks/scripts/CreateLogMetricFilterAndAlarm.py +++ b/source/remediation_runbooks/scripts/CreateLogMetricFilterAndAlarm.py @@ -1,19 +1,15 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import boto3 import logging import os + +import boto3 from botocore.config import Config -boto_config = Config( - retries={ - 'max_attempts': 10, - 'mode': 'standard' - } -) +boto_config = Config(retries={"max_attempts": 10, "mode": "standard"}) log = logging.getLogger() -LOG_LEVEL = str(os.getenv('LogLevel', 'INFO')) +LOG_LEVEL = str(os.getenv("LogLevel", "INFO")) log.setLevel(LOG_LEVEL) @@ -27,7 +23,14 @@ def get_service_client(service_name): return boto3.client(service_name, config=boto_config) -def put_metric_filter(cw_log_group, filter_name, filter_pattern, metric_name, metric_namespace, metric_value): +def put_metric_filter( + cw_log_group, + filter_name, + filter_pattern, + metric_name, + metric_namespace, + metric_value, +): """ Puts the metric filter on the CloudWatch log group with provided values :param cw_log_group: Name of the CloudWatch log group @@ -37,9 +40,19 @@ def put_metric_filter(cw_log_group, filter_name, filter_pattern, metric_name, me :param metric_namespace: Namespace where metric is logged :param metric_value: Value to be logged for the metric """ - logs_client = get_service_client('logs') - log.debug("Putting the metric filter with values: {}".format([ - cw_log_group, filter_name, filter_pattern, metric_name, metric_namespace, metric_value])) + logs_client = get_service_client("logs") + log.debug( + "Putting the metric filter with values: {}".format( + [ + cw_log_group, + filter_name, + filter_pattern, + metric_name, + metric_namespace, + metric_value, + ] + ) + ) try: logs_client.put_metric_filter( logGroupName=cw_log_group, @@ -47,19 +60,21 @@ def put_metric_filter(cw_log_group, filter_name, filter_pattern, metric_name, me filterPattern=filter_pattern, metricTransformations=[ { - 'metricName': metric_name, - 'metricNamespace': metric_namespace, - 'metricValue': str(metric_value), - 'unit': 'Count' + "metricName": metric_name, + "metricNamespace": metric_namespace, + "metricValue": 
str(metric_value), + "unit": "Count", } - ] + ], ) except Exception as e: exit("Exception occurred while putting metric filter: " + str(e)) log.debug("Successfully added the metric filter.") -def put_metric_alarm(alarm_name, alarm_desc, alarm_threshold, metric_name, metric_namespace, topic_arn): +def put_metric_alarm( + alarm_name, alarm_desc, alarm_threshold, metric_name, metric_namespace, topic_arn +): """ Puts the metric alarm for the metric name with provided values :param alarm_name: Name for the alarm @@ -68,30 +83,29 @@ def put_metric_alarm(alarm_name, alarm_desc, alarm_threshold, metric_name, metri :param metric_name: Name of the metric :param metric_namespace: Namespace where metric is logged """ - cw_client = get_service_client('cloudwatch') - log.debug("Putting the metric alarm with values {}".format( - [alarm_name, alarm_desc, alarm_threshold, metric_name, metric_namespace])) + cw_client = get_service_client("cloudwatch") + log.debug( + "Putting the metric alarm with values {}".format( + [alarm_name, alarm_desc, alarm_threshold, metric_name, metric_namespace] + ) + ) try: cw_client.put_metric_alarm( AlarmName=alarm_name, AlarmDescription=alarm_desc, ActionsEnabled=True, - OKActions=[ - topic_arn - ], - AlarmActions=[ - topic_arn - ], + OKActions=[topic_arn], + AlarmActions=[topic_arn], MetricName=metric_name, Namespace=metric_namespace, - Statistic='Sum', + Statistic="Sum", Period=300, - Unit='Count', + Unit="Count", EvaluationPeriods=12, DatapointsToAlarm=1, Threshold=alarm_threshold, - ComparisonOperator='GreaterThanOrEqualToThreshold', - TreatMissingData='notBreaching' + ComparisonOperator="GreaterThanOrEqualToThreshold", + TreatMissingData="notBreaching", ) except Exception as e: exit("Exception occurred while putting metric alarm: " + str(e)) @@ -103,22 +117,36 @@ def verify(event, _): log.debug("====Print Event====") log.debug(event) - filter_name = event['FilterName'] - filter_pattern = event['FilterPattern'] - metric_name = event['MetricName'] - 
metric_namespace = event['MetricNamespace'] - metric_value = event['MetricValue'] - alarm_name = event['AlarmName'] - alarm_desc = event['AlarmDesc'] - alarm_threshold = event['AlarmThreshold'] - cw_log_group = event['LogGroupName'] - topic_arn = event['TopicArn'] + filter_name = event["FilterName"] + filter_pattern = event["FilterPattern"] + metric_name = event["MetricName"] + metric_namespace = event["MetricNamespace"] + metric_value = event["MetricValue"] + alarm_name = event["AlarmName"] + alarm_desc = event["AlarmDesc"] + alarm_threshold = event["AlarmThreshold"] + cw_log_group = event["LogGroupName"] + topic_arn = event["TopicArn"] - put_metric_filter(cw_log_group, filter_name, filter_pattern, metric_name, metric_namespace, metric_value) - put_metric_alarm(alarm_name, alarm_desc, alarm_threshold, metric_name, metric_namespace, topic_arn) + put_metric_filter( + cw_log_group, + filter_name, + filter_pattern, + metric_name, + metric_namespace, + metric_value, + ) + put_metric_alarm( + alarm_name, + alarm_desc, + alarm_threshold, + metric_name, + metric_namespace, + topic_arn, + ) return { "response": { "message": f'Created filter {event["FilterName"]} for metric {event["MetricName"]}, and alarm {event["AlarmName"]}', - "status": "Success" + "status": "Success", } } diff --git a/source/remediation_runbooks/scripts/CreateLogMetricFilterAndAlarm_createtopic.py b/source/remediation_runbooks/scripts/CreateLogMetricFilterAndAlarm_createtopic.py index 267e91e9..ea1341b6 100644 --- a/source/remediation_runbooks/scripts/CreateLogMetricFilterAndAlarm_createtopic.py +++ b/source/remediation_runbooks/scripts/CreateLogMetricFilterAndAlarm_createtopic.py @@ -1,91 +1,87 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import json + import boto3 from botocore.config import Config from botocore.exceptions import ClientError -boto_config = Config( - retries ={ - 'mode': 'standard' - } -) +boto_config = Config(retries={"mode": "standard"}) + def connect_to_sns(): - return boto3.client('sns', config=boto_config) + return boto3.client("sns", config=boto_config) + def connect_to_ssm(): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) -def create_encrypted_topic(event, _): - kms_key_arn = event['kms_key_arn'] +def create_encrypted_topic(event, _): + kms_key_arn = event["kms_key_arn"] new_topic = False - topic_arn = '' - topic_name = event['topic_name'] + topic_arn = "" + topic_name = event["topic_name"] try: sns = connect_to_sns() topic_arn = sns.create_topic( - Name=topic_name, - Attributes={ - 'KmsMasterKeyId': kms_key_arn.split('key/')[1] - } - )['TopicArn'] + Name=topic_name, Attributes={"KmsMasterKeyId": kms_key_arn.split("key/")[1]} + )["TopicArn"] new_topic = True except ClientError as client_exception: - exception_type = client_exception.response['Error']['Code'] - if exception_type == 'InvalidParameter': - print(f'Topic {topic_name} already exists. This remediation may have been run before.') - print('Ignoring exception - remediation continues.') - topic_arn = sns.create_topic( - Name=topic_name - )['TopicArn'] + exception_type = client_exception.response["Error"]["Code"] + if exception_type == "InvalidParameter": + print( + f"Topic {topic_name} already exists. This remediation may have been run before." 
+ ) + print("Ignoring exception - remediation continues.") + topic_arn = sns.create_topic(Name=topic_name)["TopicArn"] else: - exit(f'ERROR: Unhandled client exception: {client_exception}') + exit(f"ERROR: Unhandled client exception: {client_exception}") except Exception as e: - exit(f'ERROR: could not create SNS Topic {topic_name}: {str(e)}') + exit(f"ERROR: could not create SNS Topic {topic_name}: {str(e)}") if new_topic: try: ssm = connect_to_ssm() ssm.put_parameter( - Name='/Solutions/SO0111/SNS_Topic_CIS3.x', - Description='SNS Topic for AWS Config updates', - Type='String', + Name="/Solutions/SO0111/SNS_Topic_CIS3.x", + Description="SNS Topic for AWS Config updates", + Type="String", Overwrite=True, - Value=topic_arn + Value=topic_arn, ) except Exception as e: - exit(f'ERROR: could not create SNS Topic {topic_name}: {str(e)}') + exit(f"ERROR: could not create SNS Topic {topic_name}: {str(e)}") create_topic_policy(topic_arn) return {"topic_arn": topic_arn} + def create_topic_policy(topic_arn): sns = connect_to_sns() try: topic_policy = { "Id": "Policy_ID", "Statement": [ - { - "Sid": "AWSConfigSNSPolicy", - "Effect": "Allow", - "Principal": { - "Service": "cloudwatch.amazonaws.com" - }, - "Action": "SNS:Publish", - "Resource": topic_arn, - }] + { + "Sid": "AWSConfigSNSPolicy", + "Effect": "Allow", + "Principal": {"Service": "cloudwatch.amazonaws.com"}, + "Action": "SNS:Publish", + "Resource": topic_arn, + } + ], } sns.set_topic_attributes( TopicArn=topic_arn, - AttributeName='Policy', - AttributeValue=json.dumps(topic_policy) + AttributeName="Policy", + AttributeValue=json.dumps(topic_policy), ) except Exception as e: - exit(f'ERROR: Failed to SetTopicAttributes for {topic_arn}: {str(e)}') + exit(f"ERROR: Failed to SetTopicAttributes for {topic_arn}: {str(e)}") diff --git a/source/remediation_runbooks/scripts/DisableTGWAutoAcceptSharedAttachments.py b/source/remediation_runbooks/scripts/DisableTGWAutoAcceptSharedAttachments.py new file mode 100644 index 
00000000..e385afaf --- /dev/null +++ b/source/remediation_runbooks/scripts/DisableTGWAutoAcceptSharedAttachments.py @@ -0,0 +1,43 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import boto3 +from botocore.config import Config + +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + + +def connect_to_ec2(): + return boto3.client("ec2", config=boto_config) + + +def lambda_handler(event, _): + tgw_id = event["TransitGatewayId"] + + ec2 = connect_to_ec2() + + try: + ec2.modify_transit_gateway( + TransitGatewayId=tgw_id, Options={"AutoAcceptSharedAttachments": "disable"} + ) + + tgw_updated = ec2.describe_transit_gateways(TransitGatewayIds=[tgw_id]) + if ( + tgw_updated["TransitGateways"][0]["Options"]["AutoAcceptSharedAttachments"] + == "disable" + ): + return { + "response": { + "message": "Transit Gateway AutoAcceptSharedAttachments option disabled.", + "status": "Success", + } + } + else: + return { + "response": { + "message": "Failed to disable AutoAcceptSharedAttachments on Transit Gateway.", + "status": "Failed", + } + } + + except Exception as e: + exit("Failed to disable AutoAcceptSharedAttachments: " + str(e)) diff --git a/source/remediation_runbooks/scripts/DisableUnrestrictedAccessToHighRiskPorts.py b/source/remediation_runbooks/scripts/DisableUnrestrictedAccessToHighRiskPorts.py new file mode 100644 index 00000000..0e4ef43d --- /dev/null +++ b/source/remediation_runbooks/scripts/DisableUnrestrictedAccessToHighRiskPorts.py @@ -0,0 +1,107 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import boto3 +from botocore.config import Config + +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + +# List of high risk ports to check for unrestricted access +PORTS_TO_CHECK = { + 20, + 21, + 22, + 23, + 25, + 110, + 135, + 143, + 445, + 1433, + 1434, + 3000, + 3306, + 3389, + 4333, + 5000, + 5432, + 5500, + 5601, + 8080, + 8088, + 8888, + 9200, + 9300, +} +# IPV4 and IPV6 open access +OPENIPV4 = "0.0.0.0/0" +OPENIPV6 = "::/0" +PROTOCOLS = {"tcp", "udp"} + + +def connect_to_ec2(): + return boto3.client("ec2", config=boto_config) + + +def lambda_handler(event, _): + security_group_id = event["SecurityGroupId"] + + ec2 = connect_to_ec2() + + try: + # Get the security group rules + security_group_rules = ec2.describe_security_group_rules( + Filters=[ + { + "Name": "group-id", + "Values": [ + security_group_id, + ], + }, + ], + ) + + # List to return rules that are deleted + rules_deleted = [] + + for rule in security_group_rules["SecurityGroupRules"]: + # Look for TCP or UDP ingress rules + if rule["IpProtocol"] in PROTOCOLS and not rule["IsEgress"]: + # Check for high risk ports + if any( + port in range(rule["FromPort"], rule["ToPort"] + 1) + for port in PORTS_TO_CHECK + ): + # Check for IPV4 open access + if "CidrIpv4" in rule and rule["CidrIpv4"] == OPENIPV4: + # Add rule to list + rules_deleted.append(rule["SecurityGroupRuleId"]) + # Delete the rule + ec2.revoke_security_group_ingress( + GroupId=security_group_id, + SecurityGroupRuleIds=[ + rule["SecurityGroupRuleId"], + ], + ) + + # Check for IPV6 open access + elif "CidrIpv6" in rule and rule["CidrIpv6"] == OPENIPV6: + # Add rule to list + rules_deleted.append(rule["SecurityGroupRuleId"]) + + # Delete the rule + ec2.revoke_security_group_ingress( + GroupId=security_group_id, + SecurityGroupRuleIds=[ + rule["SecurityGroupRuleId"], + ], + ) + + return { + "message": "Successfully removed security group rules on " + + security_group_id, + "status": 
"Success", + "rules_deleted": rules_deleted, + } + + except Exception as e: + exit("Failed to remove security group rules: " + str(e)) diff --git a/source/remediation_runbooks/scripts/EnableAWSConfig_createconfigbucket.py b/source/remediation_runbooks/scripts/EnableAWSConfig_createconfigbucket.py index 6bc6fe3f..88a0a9fa 100644 --- a/source/remediation_runbooks/scripts/EnableAWSConfig_createconfigbucket.py +++ b/source/remediation_runbooks/scripts/EnableAWSConfig_createconfigbucket.py @@ -1,67 +1,62 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 import json + import boto3 from botocore.config import Config from botocore.exceptions import ClientError -from botocore.retries import bucket -boto_config = Config( - retries ={ - 'mode': 'standard' - } -) +boto_config = Config(retries={"mode": "standard"}) + def connect_to_s3(boto_config): - return boto3.client('s3', config=boto_config) + return boto3.client("s3", config=boto_config) + def create_bucket(bucket_name, aws_region): s3 = connect_to_s3(boto_config) try: - if aws_region == 'us-east-1': - s3.create_bucket( - ACL='private', - Bucket=bucket_name - ) + if aws_region == "us-east-1": + s3.create_bucket(ACL="private", Bucket=bucket_name) else: s3.create_bucket( - ACL='private', + ACL="private", Bucket=bucket_name, - CreateBucketConfiguration={ - 'LocationConstraint': aws_region - } + CreateBucketConfiguration={"LocationConstraint": aws_region}, ) return "created" except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] # bucket already exists - return if exception_type in ["BucketAlreadyExists", "BucketAlreadyOwnedByYou"]: - print('Bucket ' + bucket_name + ' already exists') + print("Bucket " + bucket_name + " already exists") return "already exists" else: - exit(f'ERROR creating bucket {bucket_name}: {str(ex)}') + exit(f"ERROR creating bucket {bucket_name}: {str(ex)}") except Exception as e: 
- exit(f'ERROR creating bucket {bucket_name}: {str(e)}') + exit(f"ERROR creating bucket {bucket_name}: {str(e)}") + def encrypt_bucket(bucket_name, kms_key): s3 = connect_to_s3(boto_config) try: s3.put_bucket_encryption( - Bucket=bucket_name, - ServerSideEncryptionConfiguration={ - 'Rules': [ - { - 'ApplyServerSideEncryptionByDefault': { - 'SSEAlgorithm': 'aws:kms', - 'KMSMasterKeyID': kms_key - } - } - ] - } - ) + Bucket=bucket_name, + ServerSideEncryptionConfiguration={ + "Rules": [ + { + "ApplyServerSideEncryptionByDefault": { + "SSEAlgorithm": "aws:kms", + "KMSMasterKeyID": kms_key, + } + } + ] + }, + ) except Exception as e: - exit(f'ERROR putting bucket encryption for {bucket_name}: {str(e)}') + exit(f"ERROR putting bucket encryption for {bucket_name}: {str(e)}") + def block_public_access(bucket_name): s3 = connect_to_s3(boto_config) @@ -69,14 +64,15 @@ def block_public_access(bucket_name): s3.put_public_access_block( Bucket=bucket_name, PublicAccessBlockConfiguration={ - 'BlockPublicAcls': True, - 'IgnorePublicAcls': True, - 'BlockPublicPolicy': True, - 'RestrictPublicBuckets': True - } + "BlockPublicAcls": True, + "IgnorePublicAcls": True, + "BlockPublicPolicy": True, + "RestrictPublicBuckets": True, + }, ) except Exception as e: - exit(f'ERROR setting public access block for bucket {bucket_name}: {str(e)}') + exit(f"ERROR setting public access block for bucket {bucket_name}: {str(e)}") + def enable_access_logging(bucket_name, logging_bucket): s3 = connect_to_s3(boto_config) @@ -84,14 +80,15 @@ def enable_access_logging(bucket_name, logging_bucket): s3.put_bucket_logging( Bucket=bucket_name, BucketLoggingStatus={ - 'LoggingEnabled': { - 'TargetBucket': logging_bucket, - 'TargetPrefix': f'access-logs/{bucket_name}' - } - } + "LoggingEnabled": { + "TargetBucket": logging_bucket, + "TargetPrefix": f"access-logs/{bucket_name}", + } + }, ) except Exception as e: - exit(f'Error setting access logging for bucket {bucket_name}: {str(e)}') + exit(f"Error setting 
access logging for bucket {bucket_name}: {str(e)}") + def create_bucket_policy(config_bucket, aws_partition): s3 = connect_to_s3(boto_config) @@ -99,66 +96,53 @@ def create_bucket_policy(config_bucket, aws_partition): bucket_policy = { "Version": "2012-10-17", "Statement": [ - { - "Sid": "AWSConfigBucketPermissionsCheck", - "Effect": "Allow", - "Principal": { - "Service": [ - "config.amazonaws.com" #NOSONAR - ] + { + "Sid": "AWSConfigBucketPermissionsCheck", + "Effect": "Allow", + "Principal": {"Service": ["config.amazonaws.com"]}, # NOSONAR + "Action": "s3:GetBucketAcl", + "Resource": "arn:" + aws_partition + ":s3:::" + config_bucket, }, - "Action": "s3:GetBucketAcl", - "Resource": "arn:" + aws_partition + ":s3:::" + config_bucket - }, - { - "Sid": "AWSConfigBucketExistenceCheck", - "Effect": "Allow", - "Principal": { - "Service": [ - "config.amazonaws.com" - ] + { + "Sid": "AWSConfigBucketExistenceCheck", + "Effect": "Allow", + "Principal": {"Service": ["config.amazonaws.com"]}, + "Action": "s3:ListBucket", + "Resource": "arn:" + aws_partition + ":s3:::" + config_bucket, }, - "Action": "s3:ListBucket", - "Resource": "arn:" + aws_partition + ":s3:::" + config_bucket - }, - { - "Sid": "AWSConfigBucketDelivery", - "Effect": "Allow", - "Principal": { - "Service": [ - "config.amazonaws.com" - ] + { + "Sid": "AWSConfigBucketDelivery", + "Effect": "Allow", + "Principal": {"Service": ["config.amazonaws.com"]}, + "Action": "s3:PutObject", + "Resource": "arn:" + + aws_partition + + ":s3:::" + + config_bucket + + "/*", + "Condition": { + "StringEquals": {"s3:x-amz-acl": "bucket-owner-full-control"} + }, }, - "Action": "s3:PutObject", - "Resource": "arn:" + aws_partition + ":s3:::" + config_bucket + "/*", - "Condition": { - "StringEquals": { - "s3:x-amz-acl": "bucket-owner-full-control" - } - } - } - ] + ], } - s3.put_bucket_policy( - Bucket=config_bucket, - Policy=json.dumps(bucket_policy) - ) + s3.put_bucket_policy(Bucket=config_bucket, Policy=json.dumps(bucket_policy)) 
except Exception as e: - exit(f'ERROR: PutBucketPolicy failed for {config_bucket}: {str(e)}') + exit(f"ERROR: PutBucketPolicy failed for {config_bucket}: {str(e)}") -def create_encrypted_bucket(event, _): - kms_key_arn = event['kms_key_arn'] - aws_partition = event['partition'] - aws_account = event['account'] - aws_region = event['region'] - logging_bucket = event['logging_bucket'] - bucket_name = 'so0111-aws-config-' + aws_region + '-' + aws_account +def create_encrypted_bucket(event, _): + kms_key_arn = event["kms_key_arn"] + aws_partition = event["partition"] + aws_account = event["account"] + aws_region = event["region"] + logging_bucket = event["logging_bucket"] + bucket_name = "so0111-aws-config-" + aws_region + "-" + aws_account - if create_bucket(bucket_name, aws_region) == 'already exists': + if create_bucket(bucket_name, aws_region) == "already exists": return {"config_bucket": bucket_name} - encrypt_bucket(bucket_name, kms_key_arn.split('key/')[1]) + encrypt_bucket(bucket_name, kms_key_arn.split("key/")[1]) block_public_access(bucket_name) enable_access_logging(bucket_name, logging_bucket) create_bucket_policy(bucket_name, aws_partition) diff --git a/source/remediation_runbooks/scripts/EnableAWSConfig_createtopic.py b/source/remediation_runbooks/scripts/EnableAWSConfig_createtopic.py index 7a3d02ea..53bf0790 100644 --- a/source/remediation_runbooks/scripts/EnableAWSConfig_createtopic.py +++ b/source/remediation_runbooks/scripts/EnableAWSConfig_createtopic.py @@ -1,91 +1,87 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import json + import boto3 from botocore.config import Config from botocore.exceptions import ClientError -boto_config = Config( - retries ={ - 'mode': 'standard' - } -) +boto_config = Config(retries={"mode": "standard"}) + def connect_to_sns(): - return boto3.client('sns', config=boto_config) + return boto3.client("sns", config=boto_config) + def connect_to_ssm(): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) -def create_encrypted_topic(event, _): - kms_key_arn = event['kms_key_arn'] +def create_encrypted_topic(event, _): + kms_key_arn = event["kms_key_arn"] new_topic = False - topic_arn = '' - topic_name = event['topic_name'] + topic_arn = "" + topic_name = event["topic_name"] try: sns = connect_to_sns() topic_arn = sns.create_topic( - Name=topic_name, - Attributes={ - 'KmsMasterKeyId': kms_key_arn.split('key/')[1] - } - )['TopicArn'] + Name=topic_name, Attributes={"KmsMasterKeyId": kms_key_arn.split("key/")[1]} + )["TopicArn"] new_topic = True except ClientError as client_exception: - exception_type = client_exception.response['Error']['Code'] - if exception_type == 'InvalidParameter': - print(f'Topic {topic_name} already exists. This remediation may have been run before.') - print('Ignoring exception - remediation continues.') - topic_arn = sns.create_topic( - Name=topic_name - )['TopicArn'] + exception_type = client_exception.response["Error"]["Code"] + if exception_type == "InvalidParameter": + print( + f"Topic {topic_name} already exists. This remediation may have been run before." 
+ ) + print("Ignoring exception - remediation continues.") + topic_arn = sns.create_topic(Name=topic_name)["TopicArn"] else: - exit(f'ERROR: Unhandled client exception: {client_exception}') + exit(f"ERROR: Unhandled client exception: {client_exception}") except Exception as e: - exit(f'ERROR: could not create SNS Topic {topic_name}: {str(e)}') + exit(f"ERROR: could not create SNS Topic {topic_name}: {str(e)}") if new_topic: try: ssm = connect_to_ssm() ssm.put_parameter( - Name='/Solutions/SO0111/SNS_Topic_Config.1', - Description='SNS Topic for AWS Config updates', - Type='String', + Name="/Solutions/SO0111/SNS_Topic_Config.1", + Description="SNS Topic for AWS Config updates", + Type="String", Overwrite=True, - Value=topic_arn + Value=topic_arn, ) except Exception as e: - exit(f'ERROR: could not create SNS Topic {topic_name}: {str(e)}') + exit(f"ERROR: could not create SNS Topic {topic_name}: {str(e)}") create_topic_policy(topic_arn) return {"topic_arn": topic_arn} + def create_topic_policy(topic_arn): sns = connect_to_sns() try: topic_policy = { "Id": "Policy_ID", "Statement": [ - { - "Sid": "AWSConfigSNSPolicy", - "Effect": "Allow", - "Principal": { - "Service": "config.amazonaws.com" - }, - "Action": "SNS:Publish", - "Resource": topic_arn, - }] + { + "Sid": "AWSConfigSNSPolicy", + "Effect": "Allow", + "Principal": {"Service": "config.amazonaws.com"}, + "Action": "SNS:Publish", + "Resource": topic_arn, + } + ], } sns.set_topic_attributes( TopicArn=topic_arn, - AttributeName='Policy', - AttributeValue=json.dumps(topic_policy) + AttributeName="Policy", + AttributeValue=json.dumps(topic_policy), ) except Exception as e: - exit(f'ERROR: Failed to SetTopicAttributes for {topic_arn}: {str(e)}') + exit(f"ERROR: Failed to SetTopicAttributes for {topic_arn}: {str(e)}") diff --git a/source/remediation_runbooks/scripts/EnableAWSConfig_enableconfig.py b/source/remediation_runbooks/scripts/EnableAWSConfig_enableconfig.py index 52c581d0..63c83783 100644 --- 
a/source/remediation_runbooks/scripts/EnableAWSConfig_enableconfig.py +++ b/source/remediation_runbooks/scripts/EnableAWSConfig_enableconfig.py @@ -4,78 +4,88 @@ from botocore.config import Config from botocore.exceptions import ClientError -boto_config = Config( - retries ={ - 'mode': 'standard' - } -) +boto_config = Config(retries={"mode": "standard"}) + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def create_config_recorder(aws_partition, aws_account, aws_service_role): cfgsvc = connect_to_config(boto_config) try: - config_service_role_arn = 'arn:' + aws_partition + ':iam::' + aws_account + ':role/' + aws_service_role + config_service_role_arn = ( + "arn:" + + aws_partition + + ":iam::" + + aws_account + + ":role/" + + aws_service_role + ) cfgsvc.put_configuration_recorder( ConfigurationRecorder={ - 'name': 'default', - 'roleARN': config_service_role_arn, - 'recordingGroup': { - 'allSupported': True, - 'includeGlobalResourceTypes': True - } + "name": "default", + "roleARN": config_service_role_arn, + "recordingGroup": { + "allSupported": True, + "includeGlobalResourceTypes": True, + }, } ) except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] # recorder already exists - continue if exception_type in ["MaxNumberOfConfigurationRecordersExceededException"]: - print('Config Recorder already exists. Continuing.') + print("Config Recorder already exists. 
Continuing.") else: - exit(f'ERROR: Boto3 ClientError enabling Config: {exception_type} - {str(ex)}') + exit( + f"ERROR: Boto3 ClientError enabling Config: {exception_type} - {str(ex)}" + ) except Exception as e: - exit(f'ERROR enabling AWS Config - create_config_recorder: {str(e)}') + exit(f"ERROR enabling AWS Config - create_config_recorder: {str(e)}") + def create_delivery_channel(config_bucket, aws_account, topic_arn): cfgsvc = connect_to_config(boto_config) try: cfgsvc.put_delivery_channel( DeliveryChannel={ - 'name': 'default', - 's3BucketName': config_bucket, - 's3KeyPrefix': aws_account, - 'snsTopicARN': topic_arn, - 'configSnapshotDeliveryProperties': { - 'deliveryFrequency': 'Twelve_Hours' - } + "name": "default", + "s3BucketName": config_bucket, + "s3KeyPrefix": aws_account, + "snsTopicARN": topic_arn, + "configSnapshotDeliveryProperties": { + "deliveryFrequency": "Twelve_Hours" + }, } ) except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] # delivery channel already exists - return if exception_type in ["MaxNumberOfDeliveryChannelsExceededException"]: - print('DeliveryChannel already exists') + print("DeliveryChannel already exists") else: - exit(f'ERROR: Boto3 ClientError enabling Config: {exception_type} - {str(ex)}') + exit( + f"ERROR: Boto3 ClientError enabling Config: {exception_type} - {str(ex)}" + ) except Exception as e: - exit(f'ERROR enabling AWS Config - create_delivery_channel: {str(e)}') + exit(f"ERROR enabling AWS Config - create_delivery_channel: {str(e)}") + def start_recorder(): cfgsvc = connect_to_config(boto_config) try: - cfgsvc.start_configuration_recorder( - ConfigurationRecorderName='default' - ) + cfgsvc.start_configuration_recorder(ConfigurationRecorderName="default") except Exception as e: - exit(f'ERROR enabling AWS Config: {str(e)}') + exit(f"ERROR enabling AWS Config: {str(e)}") + def enable_config(event, _): - aws_account = event['account'] - aws_partition 
= event['partition'] - aws_service_role = event['aws_service_role'] - config_bucket = event['config_bucket'] - topic_arn = event['topic_arn'] + aws_account = event["account"] + aws_partition = event["partition"] + aws_service_role = event["aws_service_role"] + config_bucket = event["config_bucket"] + topic_arn = event["topic_arn"] create_config_recorder(aws_partition, aws_account, aws_service_role) create_delivery_channel(config_bucket, aws_account, topic_arn) diff --git a/source/remediation_runbooks/scripts/EnableAWSConfig_summary.py b/source/remediation_runbooks/scripts/EnableAWSConfig_summary.py index 395c30f9..dedcd93c 100644 --- a/source/remediation_runbooks/scripts/EnableAWSConfig_summary.py +++ b/source/remediation_runbooks/scripts/EnableAWSConfig_summary.py @@ -3,11 +3,10 @@ def process_results(event, _): print(f'Created encrypted SNS topic {event["sns_topic_arn"]}') print(f'Created encrypted Config bucket {event["config_bucket"]}') - print(f'Created access logging for Config bucket in bucket {event["logging_bucket"]}') - print('Enabled AWS Config by creating a default recorder') + print( + f'Created access logging for Config bucket in bucket {event["logging_bucket"]}' + ) + print("Enabled AWS Config by creating a default recorder") return { - "response": { - "message": "AWS Config successfully enabled", - "status": "Success" - } + "response": {"message": "AWS Config successfully enabled", "status": "Success"} } diff --git a/source/remediation_runbooks/scripts/EnableAutoScalingGroupELBHealthCheck_validate.py b/source/remediation_runbooks/scripts/EnableAutoScalingGroupELBHealthCheck_validate.py index 8e366bd9..4bd4f52a 100644 --- a/source/remediation_runbooks/scripts/EnableAutoScalingGroupELBHealthCheck_validate.py +++ b/source/remediation_runbooks/scripts/EnableAutoScalingGroupELBHealthCheck_validate.py @@ -1,43 +1,40 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import json + import boto3 from botocore.config import Config -from botocore.exceptions import ClientError + def connect_to_autoscaling(boto_config): - return boto3.client('autoscaling', config=boto_config) + return boto3.client("autoscaling", config=boto_config) -def verify(event, _): - boto_config = Config( - retries ={ - 'mode': 'standard' - } - ) +def verify(event, _): + boto_config = Config(retries={"mode": "standard"}) asg_client = connect_to_autoscaling(boto_config) - asg_name = event['AsgName'] + asg_name = event["AsgName"] try: desc_asg = asg_client.describe_auto_scaling_groups( AutoScalingGroupNames=[asg_name] ) - if len(desc_asg['AutoScalingGroups']) < 1: - exit(f'No AutoScaling Group found matching {asg_name}') + if len(desc_asg["AutoScalingGroups"]) < 1: + exit(f"No AutoScaling Group found matching {asg_name}") - health_check = desc_asg['AutoScalingGroups'][0]['HealthCheckType'] - print(json.dumps(desc_asg['AutoScalingGroups'][0], default=str)) - if (health_check == 'ELB'): + health_check = desc_asg["AutoScalingGroups"][0]["HealthCheckType"] + print(json.dumps(desc_asg["AutoScalingGroups"][0], default=str)) + if health_check == "ELB": return { "response": { "message": "Autoscaling Group health check type updated to ELB", - "status": "Success" + "status": "Success", } } else: return { "response": { "message": "Autoscaling Group health check type is not ELB", - "status": "Failed" + "status": "Failed", } } except Exception as e: diff --git a/source/remediation_runbooks/scripts/EnableAutoSecretRotation.py b/source/remediation_runbooks/scripts/EnableAutoSecretRotation.py new file mode 100644 index 00000000..73d84ef2 --- /dev/null +++ b/source/remediation_runbooks/scripts/EnableAutoSecretRotation.py @@ -0,0 +1,60 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 + +import boto3 +from botocore.config import Config + +BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + + +def connect_to_secretsmanager(): + return boto3.client("secretsmanager", config=BOTO_CONFIG) + + +# Check if secret rotation is enabled on the secet. +def check_secret_rotation(secret_arn, secretsmanager_client): + response = secretsmanager_client.describe_secret(SecretId=secret_arn) + if "RotationEnabled" in response: + if response["RotationEnabled"]: + return True + else: + return False + + +def lambda_handler(event, _): + secret_arn = event["SecretARN"] + number_of_days = event["MaximumAllowedRotationFrequency"] + + secretsmanager = connect_to_secretsmanager() + + try: + # Set rotation schedule following best practices + secretsmanager.rotate_secret( + SecretId=secret_arn, + RotationRules={ + "AutomaticallyAfterDays": int(number_of_days), + }, + RotateImmediately=False, + ) + + # Verify secret rotation is enabled. + if check_secret_rotation(secret_arn, secretsmanager): + return { + "message": f"Enabled automatic secret rotation every {number_of_days} days with previously set rotation function.", + "status": "Success", + } + else: + raise RuntimeError( + "Failed to set automatic rotation schedule. Please manually set rotation on the secret." + ) + + # If a Lambda function ARN is not associated, an exception will be thrown. + except Exception as e: + # Verify secret rotation is enabled. 
+ if check_secret_rotation(secret_arn, secretsmanager): + return { + "message": f"Enabled automatic secret rotation every {number_of_days} days with previously set function.", + "status": "Success", + } + else: + exit(f"Error when setting automatic rotation schedule: {str(e)}") diff --git a/source/remediation_runbooks/scripts/EnableCloudTrailEncryption.py b/source/remediation_runbooks/scripts/EnableCloudTrailEncryption.py index fa003b61..ba1c764e 100644 --- a/source/remediation_runbooks/scripts/EnableCloudTrailEncryption.py +++ b/source/remediation_runbooks/scripts/EnableCloudTrailEncryption.py @@ -2,10 +2,11 @@ # SPDX-License-Identifier: Apache-2.0 import boto3 from botocore.config import Config -from botocore.exceptions import ClientError + def connect_to_cloudtrail(region, boto_config): - return boto3.client('cloudtrail', region_name=region, config=boto_config) + return boto3.client("cloudtrail", region_name=region, config=boto_config) + def enable_trail_encryption(event, _): """ @@ -13,28 +14,21 @@ def enable_trail_encryption(event, _): On success returns a string map On failure returns NoneType """ - boto_config = Config( - retries ={ - 'mode': 'standard' - } - ) + boto_config = Config(retries={"mode": "standard"}) - if event['trail_region'] != event['exec_region']: - exit('ERROR: cross-region remediation is not yet supported') + if event["trail_region"] != event["exec_region"]: + exit("ERROR: cross-region remediation is not yet supported") - ctrail_client = connect_to_cloudtrail(event['trail_region'], boto_config) - kms_key_arn = event['kms_key_arn'] + ctrail_client = connect_to_cloudtrail(event["trail_region"], boto_config) + kms_key_arn = event["kms_key_arn"] try: - ctrail_client.update_trail( - Name=event['trail'], - KmsKeyId=kms_key_arn - ) + ctrail_client.update_trail(Name=event["trail"], KmsKeyId=kms_key_arn) return { "response": { "message": f'Enabled KMS CMK encryption on {event["trail"]}', - "status": "Success" + "status": "Success", } } except 
Exception as e: - exit(f'Error enabling SSE-KMS encryption: {str(e)}') + exit(f"Error enabling SSE-KMS encryption: {str(e)}") diff --git a/source/remediation_runbooks/scripts/EnableCloudTrailToCloudWatchLogging_waitforloggroup.py b/source/remediation_runbooks/scripts/EnableCloudTrailToCloudWatchLogging_waitforloggroup.py index 84af170a..456a6ef1 100644 --- a/source/remediation_runbooks/scripts/EnableCloudTrailToCloudWatchLogging_waitforloggroup.py +++ b/source/remediation_runbooks/scripts/EnableCloudTrailToCloudWatchLogging_waitforloggroup.py @@ -1,32 +1,34 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 import time + import boto3 from botocore.config import Config + def connect_to_logs(boto_config): - return boto3.client('logs', config=boto_config) + return boto3.client("logs", config=boto_config) + def sleep_between_attempts(): time.sleep(2) + def wait_for_loggroup(event, _): - boto_config = Config( - retries ={ - 'mode': 'standard' - } - ) + boto_config = Config(retries={"mode": "standard"}) cwl_client = connect_to_logs(boto_config) max_retries = 3 attempts = 0 while attempts < max_retries: try: - describe_group = cwl_client.describe_log_groups(logGroupNamePrefix=event['LogGroup']) - print(len(describe_group['logGroups'])) - for group in describe_group['logGroups']: - if group['logGroupName'] == event['LogGroup']: - return str(group['arn']) + describe_group = cwl_client.describe_log_groups( + logGroupNamePrefix=event["LogGroup"] + ) + print(len(describe_group["logGroups"])) + for group in describe_group["logGroups"]: + if group["logGroupName"] == event["LogGroup"]: + return str(group["arn"]) # no match - wait and retry sleep_between_attempts() attempts += 1 diff --git a/source/remediation_runbooks/scripts/EnableGuardDuty.py b/source/remediation_runbooks/scripts/EnableGuardDuty.py new file mode 100644 index 00000000..b7a69e21 --- /dev/null +++ b/source/remediation_runbooks/scripts/EnableGuardDuty.py @@ 
-0,0 +1,50 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import boto3 +from botocore.config import Config + +BOTO_CONFIG = Config(retries={"mode": "standard"}) + + +def connect_to_guardduty(boto_config): + return boto3.client("guardduty", config=boto_config) + + +def lambda_handler(_, __): + guardduty = connect_to_guardduty(BOTO_CONFIG) + + detector_list = guardduty.list_detectors()["DetectorIds"] + + if detector_list == []: + detector = guardduty.create_detector( + Enable=True, + DataSources={ + "S3Logs": {"Enable": True}, + "Kubernetes": {"AuditLogs": {"Enable": True}}, + }, + ) + + return { + "output": { + "Message": f'GuardDuty Enabled. Detector {detector["DetectorId"]} created' + } + } + + else: + for detector_id in detector_list: + if guardduty.get_detector(DetectorId=detector_id)["Status"] == "DISABLED": + guardduty.update_detector( + DetectorId=detector_id, + Enable=True, + DataSources={ + "S3Logs": {"Enable": True}, + "Kubernetes": {"AuditLogs": {"Enable": True}}, + }, + ) + return { + "output": { + "Message": f"GuardDuty Enabled. Existing detector {detector_id} has been enabled." + } + } + + return {"output": {"Message": "GuardDuty is already enabled."}} diff --git a/source/remediation_runbooks/scripts/EnablePrivateRepositoryScanning.py b/source/remediation_runbooks/scripts/EnablePrivateRepositoryScanning.py new file mode 100644 index 00000000..1047d6d8 --- /dev/null +++ b/source/remediation_runbooks/scripts/EnablePrivateRepositoryScanning.py @@ -0,0 +1,21 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import boto3 +from botocore.config import Config + +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + + +def connect_to_ecr(): + return boto3.client("ecr", config=boto_config) + + +def lambda_handler(event, _): + repository_name = event["RepositoryName"] + ecr = connect_to_ecr() + + response = ecr.put_image_scanning_configuration( + repositoryName=repository_name, imageScanningConfiguration={"scanOnPush": True} + ) + + return response diff --git a/source/remediation_runbooks/scripts/EnableVPCFlowLogs.py b/source/remediation_runbooks/scripts/EnableVPCFlowLogs.py index beee4a60..5ac053b3 100644 --- a/source/remediation_runbooks/scripts/EnableVPCFlowLogs.py +++ b/source/remediation_runbooks/scripts/EnableVPCFlowLogs.py @@ -1,40 +1,47 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import boto3 import time + +import boto3 from botocore.config import Config from botocore.exceptions import ClientError + def connect_to_logs(boto_config): - return boto3.client('logs', config=boto_config) + return boto3.client("logs", config=boto_config) + def connect_to_ec2(boto_config): - return boto3.client('ec2', config=boto_config) + return boto3.client("ec2", config=boto_config) + def log_group_exists(client, group): try: - log_group_verification = client.describe_log_groups( - logGroupNamePrefix=group - )['logGroups'] + log_group_verification = client.describe_log_groups(logGroupNamePrefix=group)[ + "logGroups" + ] if len(log_group_verification) >= 1: for existing_loggroup in log_group_verification: - if existing_loggroup['logGroupName'] == group: + if existing_loggroup["logGroupName"] == group: return 1 return 0 except Exception as e: - exit(f'EnableVPCFlowLogs failed - unhandled exception {str(e)}') + exit(f"EnableVPCFlowLogs failed - unhandled exception {str(e)}") + def wait_for_seconds(wait_interval): time.sleep(wait_interval) + def 
wait_for_loggroup(client, wait_interval, max_retries, loggroup): attempts = 1 while not log_group_exists(client, loggroup): wait_for_seconds(wait_interval) attempts += 1 if attempts > max_retries: - exit(f'Timeout waiting for log group {loggroup} to become active') + exit(f"Timeout waiting for log group {loggroup} to become active") + def flowlogs_active(client, loggroup): # searches for flow log status, filtered on unique CW Log Group created earlier @@ -42,19 +49,17 @@ def flowlogs_active(client, loggroup): flow_status = client.describe_flow_logs( DryRun=False, Filters=[ - { - 'Name': 'log-group-name', - 'Values': [loggroup] - }, - ] - )['FlowLogs'] - if len(flow_status) == 1 and flow_status[0]['FlowLogStatus'] == 'ACTIVE': + {"Name": "log-group-name", "Values": [loggroup]}, + ], + )["FlowLogs"] + if len(flow_status) == 1 and flow_status[0]["FlowLogStatus"] == "ACTIVE": return 1 else: return 0 except Exception as e: - exit(f'EnableVPCFlowLogs failed - unhandled exception {str(e)}') + exit(f"EnableVPCFlowLogs failed - unhandled exception {str(e)}") + def wait_for_flowlogs(client, wait_interval, max_retries, loggroup): attempts = 1 @@ -62,7 +67,10 @@ def wait_for_flowlogs(client, wait_interval, max_retries, loggroup): wait_for_seconds(wait_interval) attempts += 1 if attempts > max_retries: - exit(f'Timeout waiting for flowlogs to log group {loggroup} to become active') + exit( + f"Timeout waiting for flowlogs to log group {loggroup} to become active" + ) + def enable_flow_logs(event, _): """ @@ -70,43 +78,42 @@ def enable_flow_logs(event, _): On success returns a string map On failure returns NoneType """ - max_retries = event.get('retries', 12) # max number of waits for actions to complete. - wait_interval = event.get('wait', 5) # how many seconds between attempts - - boto_config_args = { - 'retries': { - 'mode': 'standard' - } - } + max_retries = event.get( + "retries", 12 + ) # max number of waits for actions to complete. 
+ wait_interval = event.get("wait", 5) # how many seconds between attempts - boto_config = Config(**boto_config_args) + boto_config = Config(retries={"mode": "standard"}) - if 'vpc' not in event or 'remediation_role' not in event or 'kms_key_arn' not in event: - exit('Error: missing vpc from input') + if ( + "vpc" not in event + or "remediation_role" not in event + or "kms_key_arn" not in event + ): + exit("Error: missing vpc from input") logs_client = connect_to_logs(boto_config) ec2_client = connect_to_ec2(boto_config) - kms_key_arn = event['kms_key_arn'] # for logs encryption at rest + kms_key_arn = event["kms_key_arn"] # for logs encryption at rest # set dynamic variable for CW Log Group for VPC Flow Logs - vpc_flow_loggroup = "VPCFlowLogs/" + event['vpc'] + vpc_flow_loggroup = "VPCFlowLogs/" + event["vpc"] # create cloudwatch log group try: logs_client.create_log_group( - logGroupName=vpc_flow_loggroup, - kmsKeyId=kms_key_arn + logGroupName=vpc_flow_loggroup, kmsKeyId=kms_key_arn ) except ClientError as client_error: - exception_type = client_error.response['Error']['Code'] + exception_type = client_error.response["Error"]["Code"] if exception_type in ["ResourceAlreadyExistsException"]: - print(f'CloudWatch Logs group {vpc_flow_loggroup} already exists') + print(f"CloudWatch Logs group {vpc_flow_loggroup} already exists") else: - exit(f'ERROR CREATING LOGGROUP {vpc_flow_loggroup}: {str(exception_type)}') + exit(f"ERROR CREATING LOGGROUP {vpc_flow_loggroup}: {str(exception_type)}") except Exception as e: - exit(f'ERROR CREATING LOGGROUP {vpc_flow_loggroup}: {str(e)}') + exit(f"ERROR CREATING LOGGROUP {vpc_flow_loggroup}: {str(e)}") # wait for CWL creation to propagate wait_for_loggroup(logs_client, wait_interval, max_retries, vpc_flow_loggroup) @@ -115,27 +122,27 @@ def enable_flow_logs(event, _): try: ec2_client.create_flow_logs( DryRun=False, - DeliverLogsPermissionArn=event['remediation_role'], + DeliverLogsPermissionArn=event["remediation_role"], 
LogGroupName=vpc_flow_loggroup, - ResourceIds=[event['vpc']], - ResourceType='VPC', - TrafficType='REJECT', - LogDestinationType='cloud-watch-logs' + ResourceIds=[event["vpc"]], + ResourceType="VPC", + TrafficType="REJECT", + LogDestinationType="cloud-watch-logs", ) except ClientError as client_error: - exception_type = client_error.response['Error']['Code'] + exception_type = client_error.response["Error"]["Code"] if exception_type in ["FlowLogAlreadyExists"]: return { "response": { "message": f'VPC Flow Logs for {event["vpc"]} already enabled', - "status": "Success" + "status": "Success", } } else: - exit(f'ERROR CREATING LOGGROUP {vpc_flow_loggroup}: {str(exception_type)}') + exit(f"ERROR CREATING LOGGROUP {vpc_flow_loggroup}: {str(exception_type)}") except Exception as e: - exit(f'create_flow_logs failed {str(e)}') + exit(f"create_flow_logs failed {str(e)}") # wait for Flow Log creation to propagate. Exits on timeout (no need to check results) wait_for_flowlogs(ec2_client, wait_interval, max_retries, vpc_flow_loggroup) @@ -144,6 +151,6 @@ def enable_flow_logs(event, _): return { "response": { "message": f'VPC Flow Logs enabled for {event["vpc"]} to {vpc_flow_loggroup}', - "status": "Success" + "status": "Success", } } diff --git a/source/remediation_runbooks/scripts/GetPublicEBSSnapshots.py b/source/remediation_runbooks/scripts/GetPublicEBSSnapshots.py index 3ce10a0b..9cf84768 100644 --- a/source/remediation_runbooks/scripts/GetPublicEBSSnapshots.py +++ b/source/remediation_runbooks/scripts/GetPublicEBSSnapshots.py @@ -1,68 +1,62 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import json import boto3 from botocore.config import Config -from botocore.exceptions import ClientError -boto_config = Config( - retries = { - 'mode': 'standard', - 'max_attempts': 10 - } - ) +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + def connect_to_ec2(boto_config): - return boto3.client('ec2', config=boto_config) + return boto3.client("ec2", config=boto_config) + def get_public_snapshots(event, _): - account_id = event['account_id'] + account_id = event["account_id"] - if 'testmode' in event and event['testmode']: + if "testmode" in event and event["testmode"]: return [ "snap-12341234123412345", "snap-12341234123412345", "snap-12341234123412345", "snap-12341234123412345", - "snap-12341234123412345" + "snap-12341234123412345", ] return list_public_snapshots(account_id) + def list_public_snapshots(account_id): ec2 = connect_to_ec2(boto_config) - control_token = 'start' + control_token = "start" try: - public_snapshot_ids = [] while control_token: - - if control_token == 'start': # needed a value to start the loop. Now reset it - control_token = '' + if ( + control_token == "start" + ): # needed a value to start the loop. 
Now reset it + control_token = "" kwargs = { - 'MaxResults': 100, - 'OwnerIds': [ account_id ], - 'RestorableByUserIds': [ 'all' ] + "MaxResults": 100, + "OwnerIds": [account_id], + "RestorableByUserIds": ["all"], } if control_token: - kwargs['NextToken'] = control_token + kwargs["NextToken"] = control_token - response = ec2.describe_snapshots( - **kwargs - ) + response = ec2.describe_snapshots(**kwargs) - for snapshot in response['Snapshots']: - public_snapshot_ids.append(snapshot['SnapshotId']) + for snapshot in response["Snapshots"]: + public_snapshot_ids.append(snapshot["SnapshotId"]) - if 'NextToken' in response: - control_token = response['NextToken'] + if "NextToken" in response: + control_token = response["NextToken"] else: - control_token = '' + control_token = "" return public_snapshot_ids except Exception as e: print(e) - exit('Failed to describe_snapshots') + exit("Failed to describe_snapshots") diff --git a/source/remediation_runbooks/scripts/MakeEBSSnapshotsPrivate.py b/source/remediation_runbooks/scripts/MakeEBSSnapshotsPrivate.py index eef7ce37..0b4fd8ac 100644 --- a/source/remediation_runbooks/scripts/MakeEBSSnapshotsPrivate.py +++ b/source/remediation_runbooks/scripts/MakeEBSSnapshotsPrivate.py @@ -1,52 +1,47 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import json + import boto3 from botocore.config import Config -from botocore.exceptions import ClientError + def connect_to_ec2(boto_config): - return boto3.client('ec2', config=boto_config) + return boto3.client("ec2", config=boto_config) + def make_snapshots_private(event, _): - boto_config = Config( - retries = { - 'mode': 'standard', - 'max_attempts': 10 - } - ) + boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) ec2 = connect_to_ec2(boto_config) remediated = [] - snapshots = event['snapshots'] + snapshots = event["snapshots"] success_count = 0 for snapshot_id in snapshots: try: ec2.modify_snapshot_attribute( - Attribute='CreateVolumePermission', - CreateVolumePermission={ - 'Remove': [{'Group': 'all'}] - }, - SnapshotId=snapshot_id + Attribute="CreateVolumePermission", + CreateVolumePermission={"Remove": [{"Group": "all"}]}, + SnapshotId=snapshot_id, ) - print(f'Snapshot {snapshot_id} permissions set to private') + print(f"Snapshot {snapshot_id} permissions set to private") remediated.append(snapshot_id) success_count += 1 except Exception as e: print(e) - print(f'FAILED to remediate Snapshot {snapshot_id}') + print(f"FAILED to remediate Snapshot {snapshot_id}") - result=json.dumps(ec2.describe_snapshots( - SnapshotIds=remediated - ), indent=2, default=str) + result = json.dumps( + ec2.describe_snapshots(SnapshotIds=remediated), indent=2, default=str + ) print(result) return { "response": { - "message": f'{success_count} of {len(snapshots)} Snapshot permissions set to private', - "status": "Success" + "message": f"{success_count} of {len(snapshots)} Snapshot permissions set to private", + "status": "Success", } } diff --git a/source/remediation_runbooks/scripts/MakeRDSSnapshotPrivate.py b/source/remediation_runbooks/scripts/MakeRDSSnapshotPrivate.py index dc85923b..0913fc5a 100644 --- a/source/remediation_runbooks/scripts/MakeRDSSnapshotPrivate.py +++ 
b/source/remediation_runbooks/scripts/MakeRDSSnapshotPrivate.py @@ -1,45 +1,40 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import json import boto3 from botocore.config import Config -from botocore.exceptions import ClientError + def connect_to_rds(): - boto_config = Config( - retries ={ - 'mode': 'standard' - } - ) - return boto3.client('rds', config=boto_config) + boto_config = Config(retries={"mode": "standard"}) + return boto3.client("rds", config=boto_config) -def make_snapshot_private(event, _): +def make_snapshot_private(event, _): rds_client = connect_to_rds() - snapshot_id = event['DBSnapshotId'] - snapshot_type = event['DBSnapshotType'] + snapshot_id = event["DBSnapshotId"] + snapshot_type = event["DBSnapshotType"] try: - if (snapshot_type == 'snapshot'): + if snapshot_type == "snapshot": rds_client.modify_db_snapshot_attribute( DBSnapshotIdentifier=snapshot_id, - AttributeName='restore', - ValuesToRemove=['all'] + AttributeName="restore", + ValuesToRemove=["all"], ) - elif (snapshot_type == 'cluster-snapshot'): + elif snapshot_type == "cluster-snapshot": rds_client.modify_db_cluster_snapshot_attribute( DBClusterSnapshotIdentifier=snapshot_id, - AttributeName='restore', - ValuesToRemove=['all'] + AttributeName="restore", + ValuesToRemove=["all"], ) else: - exit(f'Unrecognized snapshot_type {snapshot_type}') + exit(f"Unrecognized snapshot_type {snapshot_type}") - print(f'Remediation completed: {snapshot_id} public access removed.') + print(f"Remediation completed: {snapshot_id} public access removed.") return { "response": { - "message": f'Snapshot {snapshot_id} permissions set to private', - "status": "Success" + "message": f"Snapshot {snapshot_id} permissions set to private", + "status": "Success", } } except Exception as e: - exit(f'Remediation failed for {snapshot_id}: {str(e)}') + exit(f"Remediation failed for {snapshot_id}: {str(e)}") diff --git 
a/source/remediation_runbooks/scripts/PutS3BucketPolicyDeny.py b/source/remediation_runbooks/scripts/PutS3BucketPolicyDeny.py index 2c72d78e..b7791fc2 100644 --- a/source/remediation_runbooks/scripts/PutS3BucketPolicyDeny.py +++ b/source/remediation_runbooks/scripts/PutS3BucketPolicyDeny.py @@ -1,6 +1,6 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -''' +""" Given a bucket name and list of "sensitive" IAM permissions that shall not be allowed cross-account, create an explicit deny policy for all cross-account principals, denying access to all IAM permissions in the deny list for all @@ -8,25 +8,29 @@ Note: - The deny list is a comma-separated list configured on the Config rule in parameter blacklistedActionPattern -''' +""" +import copy import json +from typing import Any, Dict + import boto3 -import copy from botocore.config import Config -from botocore.exceptions import ClientError -BOTO_CONFIG = Config( - retries = { - 'mode': 'standard', - 'max_attempts': 10 - } - ) +BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + def connect_to_s3(): - return boto3.client('s3', config=BOTO_CONFIG) + return boto3.client("s3", config=BOTO_CONFIG) + def get_partition(): - return boto3.client('sts', config=BOTO_CONFIG).get_caller_identity().get('Arn').split(':')[1] + return ( + boto3.client("sts", config=BOTO_CONFIG) + .get_caller_identity() + .get("Arn") + .split(":")[1] + ) + class BucketToRemediate: def __init__(self, bucket_name): @@ -44,73 +48,92 @@ def get_partition_where_running(self): self.partition = get_partition() def set_account_id_from_event(self, event): - self.account_id = event.get('accountid') or exit('AWS Account not specified') + self.account_id = event.get("accountid") or exit("AWS Account not specified") def set_denylist_from_event(self, event): - self.denylist = event.get('denylist').split(',') or exit('DenyList is empty or not a comma-delimited string') # Expect a comma 
seperated list in a string + self.denylist = event.get("denylist").split(",") or exit( + "DenyList is empty or not a comma-delimited string" + ) # Expect a comma seperated list in a string def get_current_bucket_policy(self): try: - self.bucket_policy = connect_to_s3().get_bucket_policy( - Bucket=self.bucket_name, - ExpectedBucketOwner=self.account_id - ).get('Policy') + self.bucket_policy = ( + connect_to_s3() + .get_bucket_policy( + Bucket=self.bucket_name, ExpectedBucketOwner=self.account_id + ) + .get("Policy") + ) except Exception as e: print(e) - exit(f'Failed to retrieve the bucket policy: {self.account_id} {self.bucket_name}') + exit( + f"Failed to retrieve the bucket policy: {self.account_id} {self.bucket_name}" + ) def update_bucket_policy(self): try: connect_to_s3().put_bucket_policy( Bucket=self.bucket_name, ExpectedBucketOwner=self.account_id, - Policy=self.bucket_policy + Policy=self.bucket_policy, ) except Exception as e: print(e) - exit(f'Failed to store the new bucket policy: {self.account_id} {self.bucket_name}') + exit( + f"Failed to store the new bucket policy: {self.account_id} {self.bucket_name}" + ) def __principal_is_asterisk(self, principals): - return (True if isinstance(principals, str) and principals == '*' else False) + return True if isinstance(principals, str) and principals == "*" else False def get_account_principals_from_bucket_policy_statement(self, statement_principals): aws_account_principals = [] for principal_type, principal in statement_principals.items(): - if principal_type != 'AWS': - continue # not an AWS account - aws_account_principals = principal if isinstance(principal, list) else [ principal ] + if principal_type != "AWS": + continue # not an AWS account + aws_account_principals = ( + principal if isinstance(principal, list) else [principal] + ) return aws_account_principals def create_explicit_deny_in_bucket_policy(self): - new_bucket_policy = json.loads(self.bucket_policy) + new_bucket_policy = 
json.loads(self.bucket_policy) # type: ignore[arg-type] deny_statement = DenyStatement(self) - for statement in new_bucket_policy['Statement']: - principals = statement.get('Principal', None) + for statement in new_bucket_policy["Statement"]: + principals = statement.get("Principal", None) if principals and not self.__principal_is_asterisk(principals): - account_principals = self.get_account_principals_from_bucket_policy_statement(copy.deepcopy(principals)) - deny_statement.add_next_principal_to_deny(account_principals, self.account_id) - - if deny_statement.deny_statement_json: - new_bucket_policy['Statement'].append(deny_statement.deny_statement_json) + account_principals = ( + self.get_account_principals_from_bucket_policy_statement( + copy.deepcopy(principals) + ) + ) + deny_statement.add_next_principal_to_deny( + account_principals, self.account_id + ) + + if ( + deny_statement.deny_statement_json + and len(deny_statement.deny_statement_json["Principal"]["AWS"]) > 0 + ): + new_bucket_policy["Statement"].append(deny_statement.deny_statement_json) self.bucket_policy = json.dumps(new_bucket_policy) return True + class DenyStatement: def __init__(self, bucket_object): self.bucket_object = bucket_object self.initialize_deny_statement() def initialize_deny_statement(self): - self.deny_statement_json = {} + self.deny_statement_json: Dict[str, Any] = {} self.deny_statement_json["Effect"] = "Deny" - self.deny_statement_json["Principal"] = { - "AWS": [] - } + self.deny_statement_json["Principal"] = {"AWS": []} self.deny_statement_json["Action"] = self.bucket_object.denylist self.deny_statement_json["Resource"] = [ - f'arn:{self.bucket_object.partition}:s3:::{self.bucket_object.bucket_name}', - f'arn:{self.bucket_object.partition}:s3:::{self.bucket_object.bucket_name}/*', + f"arn:{self.bucket_object.partition}:s3:::{self.bucket_object.bucket_name}", + f"arn:{self.bucket_object.partition}:s3:::{self.bucket_object.bucket_name}/*", ] def __str__(self): @@ -120,7 +143,7 @@ 
def add_next_principal_to_deny(self, principals_to_deny, bucket_account): if len(principals_to_deny) == 0: return this_principal = principals_to_deny.pop() - principal_account = this_principal.split(':')[4] + principal_account = this_principal.split(":")[4] if principal_account and principal_account != bucket_account: self.add_deny_principal(this_principal) @@ -130,13 +153,10 @@ def add_deny_principal(self, principal_arn): if principal_arn not in self.deny_statement_json["Principal"]["AWS"]: self.deny_statement_json["Principal"]["AWS"].append(principal_arn) - def add_deny_resource(self, resource_arn): - if self.deny_statement_json["Resource"] and resource_arn not in self.deny_statement_json.Resource: - self.deny_statement_json["Resource"].append(resource_arn) def update_bucket_policy(event, _): def __get_bucket_from_event(event): - bucket = event.get('bucket') or exit('Bucket not specified') + bucket = event.get("bucket") or exit("Bucket not specified") return bucket bucket_to_update = BucketToRemediate(__get_bucket_from_event(event)) @@ -146,4 +166,6 @@ def __get_bucket_from_event(event): if bucket_to_update.create_explicit_deny_in_bucket_policy(): bucket_to_update.update_bucket_policy() else: - exit(f'Unable to create an explicit deny statement for {bucket_to_update.bucket_name}') + exit( + f"Unable to create an explicit deny statement for {bucket_to_update.bucket_name}" + ) diff --git a/source/remediation_runbooks/scripts/RemoveLambdaPublicAccess.py b/source/remediation_runbooks/scripts/RemoveLambdaPublicAccess.py index c703d045..aa24eda8 100644 --- a/source/remediation_runbooks/scripts/RemoveLambdaPublicAccess.py +++ b/source/remediation_runbooks/scripts/RemoveLambdaPublicAccess.py @@ -1,83 +1,96 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import json + import boto3 from botocore.config import Config from botocore.exceptions import ClientError -boto_config = Config( - retries = { - 'mode': 'standard', - 'max_attempts': 10 - } -) +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + def connect_to_lambda(boto_config): - return boto3.client('lambda', config=boto_config) + return boto3.client("lambda", config=boto_config) + def print_policy_before(policy): - print('Resource Policy to be deleted:') + print("Resource Policy to be deleted:") print(json.dumps(policy, indent=2, default=str)) + def public_s3_statement_check(statement, principal): """ This function checks if the user has given access to an S3 bucket without providing an AWS account. """ try: empty_source_account_check = False - if ("StringEquals" in statement["Condition"]): - empty_source_account_check = ("AWS:SourceAccount" not in statement["Condition"]["StringEquals"]) + if "StringEquals" in statement["Condition"]: + empty_source_account_check = ( + "AWS:SourceAccount" not in statement["Condition"]["StringEquals"] + ) else: empty_source_account_check = True - return principal.get("Service", "") == "s3.amazonaws.com" and empty_source_account_check + return ( + principal.get("Service", "") == "s3.amazonaws.com" + and empty_source_account_check + ) except KeyError: return principal.get("Service", "") == "s3.amazonaws.com" + def remove_resource_policy(functionname, sid, client): try: - client.remove_permission( - FunctionName=functionname, - StatementId=sid - ) - print(f'SID {sid} removed from Lambda function {functionname}') + client.remove_permission(FunctionName=functionname, StatementId=sid) + print(f"SID {sid} removed from Lambda function {functionname}") except Exception as e: - exit(f'FAILED: SID {sid} was NOT removed from Lambda function {functionname} - {str(e)}') + exit( + f"FAILED: SID {sid} was NOT removed from Lambda function {functionname} - {str(e)}" + ) + def 
remove_public_statement(client, functionname, statement, principal): - if principal == "*" or (isinstance(principal, dict) and (principal.get("AWS","") == "*" or public_s3_statement_check(statement, principal))): + if principal == "*" or ( + isinstance(principal, dict) + and ( + principal.get("AWS", "") == "*" + or public_s3_statement_check(statement, principal) + ) + ): print_policy_before(statement) - remove_resource_policy(functionname, statement['Sid'], client) + remove_resource_policy(functionname, statement["Sid"], client) -def remove_lambda_public_access(event, _): +def remove_lambda_public_access(event, _): client = connect_to_lambda(boto_config) - functionname = event['FunctionName'] + functionname = event["FunctionName"] try: response = client.get_policy(FunctionName=functionname) - policy = response['Policy'] + policy = response["Policy"] policy_json = json.loads(policy) - statements = policy_json['Statement'] + statements = policy_json["Statement"] - print('Scanning for public resource policies in ' + functionname) + print("Scanning for public resource policies in " + functionname) for statement in statements: - remove_public_statement(client, functionname, statement, statement['Principal']) + remove_public_statement( + client, functionname, statement, statement["Principal"] + ) client.get_policy(FunctionName=functionname) verify(functionname) except ClientError as ex: - exception_type = ex.response['Error']['Code'] - if exception_type in ['ResourceNotFoundException']: + exception_type = ex.response["Error"]["Code"] + if exception_type in ["ResourceNotFoundException"]: print("Remediation completed. 
Resource policy is now empty.") else: - exit(f'ERROR: Remediation failed for RemoveLambdaPublicAccess: {str(ex)}') + exit(f"ERROR: Remediation failed for RemoveLambdaPublicAccess: {str(ex)}") except Exception as e: - exit(f'ERROR: Remediation failed for RemoveLambdaPublicAccess: {str(e)}') + exit(f"ERROR: Remediation failed for RemoveLambdaPublicAccess: {str(e)}") -def verify(function_name_to_check): +def verify(function_name_to_check): client = connect_to_lambda(boto_config) try: @@ -87,10 +100,10 @@ def verify(function_name_to_check): print(json.dumps(response, indent=2, default=str)) except ClientError as ex: - exception_type = ex.response['Error']['Code'] - if exception_type in ['ResourceNotFoundException']: + exception_type = ex.response["Error"]["Code"] + if exception_type in ["ResourceNotFoundException"]: print("Remediation completed. Resource policy is now empty.") else: - exit(f'ERROR: {exception_type} on get_policy') + exit(f"ERROR: {exception_type} on get_policy") except Exception as e: - exit(f'Exception while retrieving lambda function policy: {str(e)}') + exit(f"Exception while retrieving lambda function policy: {str(e)}") diff --git a/source/remediation_runbooks/scripts/RemoveUnusedSecret.py b/source/remediation_runbooks/scripts/RemoveUnusedSecret.py new file mode 100644 index 00000000..97801239 --- /dev/null +++ b/source/remediation_runbooks/scripts/RemoveUnusedSecret.py @@ -0,0 +1,51 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 + +from datetime import datetime, timezone + +import boto3 +from botocore.config import Config + +BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + +# Current date in the same format SecretsManager tracks LastAccessedDate +DATE_TODAY = datetime.now().replace( + hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc +) + + +def connect_to_secretsmanager(): + return boto3.client("secretsmanager", config=BOTO_CONFIG) + + +def lambda_handler(event, _): + secret_arn = event["SecretARN"] + unused_for_days = event["UnusedForDays"] + + secretsmanager = connect_to_secretsmanager() + + # Describe the secret + response = secretsmanager.describe_secret(SecretId=secret_arn) + + # Confirm the secret has been unused for more days than UnusedForDays parameter specifies + if "LastAccessedDate" in response and ( + DATE_TODAY - response["LastAccessedDate"] + ).days > int(unused_for_days): + # Delete the secret, with 30 day recovery window + response = secretsmanager.delete_secret( + SecretId=secret_arn, + RecoveryWindowInDays=30, + ) + + # Confirm secret was scheduled for deletion + if "DeletionDate" in response: + return { + "message": "Deleted the unused secret.", + "status": "Success", + } + else: + exit(f"Failed to delete the unused secret: {secret_arn}") + + exit( + f"The secret {secret_arn} cannot be deleted because it has been accessed within the past {unused_for_days} days." + ) diff --git a/source/remediation_runbooks/scripts/ReplaceCodeBuildClearTextCredentials.py b/source/remediation_runbooks/scripts/ReplaceCodeBuildClearTextCredentials.py index 2816f60c..5ae5b496 100644 --- a/source/remediation_runbooks/scripts/ReplaceCodeBuildClearTextCredentials.py +++ b/source/remediation_runbooks/scripts/ReplaceCodeBuildClearTextCredentials.py @@ -1,114 +1,127 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 +import re from json import dumps + from boto3 import client from botocore.config import Config from botocore.exceptions import ClientError -import re -boto_config = Config(retries = {'mode': 'standard'}) +boto_config = Config(retries={"mode": "standard"}) + +CREDENTIAL_NAMES_UPPER = ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"] -CREDENTIAL_NAMES_UPPER = [ - 'AWS_ACCESS_KEY_ID', - 'AWS_SECRET_ACCESS_KEY' -] def connect_to_ssm(boto_config): - return client('ssm', config = boto_config) + return client("ssm", config=boto_config) + def connect_to_iam(boto_config): - return client('iam', config = boto_config) + return client("iam", config=boto_config) + def is_clear_text_credential(env_var): - if env_var.get('type') != 'PLAINTEXT': + if env_var.get("type") != "PLAINTEXT": return False - return any(env_var.get('name').upper() == credential_name for credential_name in CREDENTIAL_NAMES_UPPER) + return any( + env_var.get("name").upper() == credential_name + for credential_name in CREDENTIAL_NAMES_UPPER + ) + def get_project_ssm_namespace(project_name): - return f'/CodeBuild/{ project_name }' + return f"/CodeBuild/{ project_name }" + def create_parameter(project_name, env_var): - env_var_name = env_var.get('name') - parameter_name = f'{ get_project_ssm_namespace(project_name) }/env/{ env_var_name }' + env_var_name = env_var.get("name") + parameter_name = f"{ get_project_ssm_namespace(project_name) }/env/{ env_var_name }" ssm_client = connect_to_ssm(boto_config) try: response = ssm_client.put_parameter( - Name = parameter_name, - Description = 'Automatically created by ASR', - Value = env_var.get("value"), - Type = 'SecureString', - Overwrite = False, - DataType = 'text' + Name=parameter_name, + Description="Automatically created by ASR", + Value=env_var.get("value"), + Type="SecureString", + Overwrite=False, + DataType="text", ) except ClientError as client_exception: - exception_type = client_exception.response['Error']['Code'] - if 
exception_type == 'ParameterAlreadyExists': - print(f'Parameter { parameter_name } already exists. This remediation may have been run before.') - print('Ignoring exception - remediation continues.') + exception_type = client_exception.response["Error"]["Code"] + if exception_type == "ParameterAlreadyExists": + print( + f"Parameter { parameter_name } already exists. This remediation may have been run before." + ) + print("Ignoring exception - remediation continues.") response = None else: - exit(f'ERROR: Unhandled client exception: { client_exception }') + exit(f"ERROR: Unhandled client exception: { client_exception }") except Exception as e: - exit(f'ERROR: could not create SSM parameter { parameter_name }: { str(e) }') + exit(f"ERROR: could not create SSM parameter { parameter_name }: { str(e) }") return response, parameter_name + def create_policy(region, account, partition, project_name): iam_client = connect_to_iam(boto_config) - policy_resource_filter = f'arn:{ partition }:ssm:{ region }:{ account }:parameter{ get_project_ssm_namespace(project_name) }/*' + policy_resource_filter = f"arn:{ partition }:ssm:{ region }:{ account }:parameter{ get_project_ssm_namespace(project_name) }/*" policy_document = { - 'Version': '2012-10-17', - 'Statement': [ + "Version": "2012-10-17", + "Statement": [ { - 'Effect': 'Allow', - 'Action': [ - 'ssm:GetParameter', - 'ssm:GetParameters' - ], - 'Resource': policy_resource_filter + "Effect": "Allow", + "Action": ["ssm:GetParameter", "ssm:GetParameters"], + "Resource": policy_resource_filter, } - ] + ], } - policy_name = f'CodeBuildSSMParameterPolicy-{ project_name }-{ region }' + policy_name = f"CodeBuildSSMParameterPolicy-{ project_name }-{ region }" try: response = iam_client.create_policy( - Description = "Automatically created by ASR", - PolicyDocument = dumps(policy_document), - PolicyName = policy_name + Description="Automatically created by ASR", + PolicyDocument=dumps(policy_document), + PolicyName=policy_name, ) except 
ClientError as client_exception: - exception_type = client_exception.response['Error']['Code'] - if exception_type == 'EntityAlreadyExists': - print(f'Policy { "" } already exists. This remediation may have been run before.') - print('Ignoring exception - remediation continues.') + exception_type = client_exception.response["Error"]["Code"] + if exception_type == "EntityAlreadyExists": + print( + f'Policy { "" } already exists. This remediation may have been run before.' + ) + print("Ignoring exception - remediation continues.") # Attach needs to know the ARN of the created policy response = { - 'Policy': { - 'Arn': f'arn:{ partition }:iam::{ account }:policy/{ policy_name }' + "Policy": { + "Arn": f"arn:{ partition }:iam::{ account }:policy/{ policy_name }" } } else: - exit(f'ERROR: Unhandled client exception: { client_exception }') + exit(f"ERROR: Unhandled client exception: { client_exception }") except Exception as e: - exit(f'ERROR: could not create access policy { policy_name }: { str(e) }') + exit(f"ERROR: could not create access policy { policy_name }: { str(e) }") return response + def attach_policy(policy_arn, service_role_name): iam_client = connect_to_iam(boto_config) try: response = iam_client.attach_role_policy( - PolicyArn = policy_arn, - RoleName = service_role_name + PolicyArn=policy_arn, RoleName=service_role_name ) except ClientError as client_exception: - exit(f'ERROR: Unhandled client exception: { client_exception }') + exit(f"ERROR: Unhandled client exception: { client_exception }") except Exception as e: - exit(f'ERROR: could not attach policy { policy_arn } to role { service_role_name }: { str(e) }') + exit( + f"ERROR: could not attach policy { policy_arn } to role { service_role_name }: { str(e) }" + ) return response + def parse_project_arn(arn): - pattern = re.compile(r'arn:(aws[a-zA-Z-]*):codebuild:([a-z]{2}(?:-gov)?-[a-z]+-\d):(\d{12}):project/[A-Za-z0-9][A-Za-z0-9\-_]{1,254}$') + pattern = re.compile( + 
r"arn:(aws[a-zA-Z-]*):codebuild:([a-z]{2}(?:-gov)?-[a-z]+-\d):(\d{12}):project/[A-Za-z0-9][A-Za-z0-9\-_]{1,254}$" + ) match = pattern.match(arn) if match: partition = match.group(1) @@ -118,21 +131,22 @@ def parse_project_arn(arn): else: raise ValueError + def replace_credentials(event, _): - project_info = event.get('ProjectInfo') - project_name = project_info.get('name') - project_env = project_info.get('environment') - project_env_vars = project_env.get('environmentVariables') + project_info = event.get("ProjectInfo") + project_name = project_info.get("name") + project_env = project_info.get("environment") + project_env_vars = project_env.get("environmentVariables") updated_project_env_vars = [] parameters = [] for env_var in project_env_vars: - if (is_clear_text_credential(env_var)): + if is_clear_text_credential(env_var): parameter_response, parameter_name = create_parameter(project_name, env_var) updated_env_var = { - 'name': env_var.get('name'), - 'type': 'PARAMETER_STORE', - 'value': parameter_name + "name": env_var.get("name"), + "type": "PARAMETER_STORE", + "value": parameter_name, } updated_project_env_vars.append(updated_env_var) parameters.append(parameter_response) @@ -140,23 +154,23 @@ def replace_credentials(event, _): updated_project_env_vars.append(env_var) updated_project_env = project_env - updated_project_env['environmentVariables'] = updated_project_env_vars + updated_project_env["environmentVariables"] = updated_project_env_vars - partition, region, account = parse_project_arn(project_info.get('arn')) + partition, region, account = parse_project_arn(project_info.get("arn")) policy = create_policy(region, account, partition, project_name) - service_role_arn = project_info.get('serviceRole') - service_role_name = service_role_arn[service_role_arn.rfind('/') + 1:] - attach_response = attach_policy(policy['Policy']['Arn'], service_role_name) + service_role_arn = project_info.get("serviceRole") + service_role_name = 
service_role_arn[service_role_arn.rfind("/") + 1 :] + attach_response = attach_policy(policy["Policy"]["Arn"], service_role_name) # datetimes are not serializable, so convert them to ISO 8601 strings - policy_datetime_keys = ['CreateDate', 'UpdateDate'] + policy_datetime_keys = ["CreateDate", "UpdateDate"] for key in policy_datetime_keys: - if key in policy['Policy']: - policy['Policy'][key] = policy['Policy'][key].isoformat() + if key in policy["Policy"]: + policy["Policy"][key] = policy["Policy"][key].isoformat() return { - 'UpdatedProjectEnv': updated_project_env, - 'Parameters': parameters, - 'Policy': policy, - 'AttachResponse': attach_response + "UpdatedProjectEnv": updated_project_env, + "Parameters": parameters, + "Policy": policy, + "AttachResponse": attach_response, } diff --git a/source/remediation_runbooks/scripts/RevokeUnauthorizedInboundRules.py b/source/remediation_runbooks/scripts/RevokeUnauthorizedInboundRules.py new file mode 100644 index 00000000..48982c7b --- /dev/null +++ b/source/remediation_runbooks/scripts/RevokeUnauthorizedInboundRules.py @@ -0,0 +1,98 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import boto3 +from botocore.config import Config + +BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + +# IPV4 and IPV6 open access +OPENIPV4 = "0.0.0.0/0" +OPENIPV6 = "::/0" + + +def connect_to_ec2(): + return boto3.client("ec2", config=BOTO_CONFIG) + + +# Function to check if rule has open access to unauthorized ports +def check_unauthorized_ports(authorized_ports, rule): + for port in range(rule["FromPort"], rule["ToPort"] + 1): + if port not in authorized_ports: + # Check for IPV4 open access + if "CidrIpv4" in rule and rule["CidrIpv4"] == OPENIPV4: + # Return True if rule has open access to unauthorized ports + return True + + # Check for IPV6 open access + elif "CidrIpv6" in rule and rule["CidrIpv6"] == OPENIPV6: + # Return True if rule is removed + return True + + # Return False if rule does not have open access to unauthorized ports + return False + + +def lambda_handler(event, _): + # Extract Security Group ID from event + security_group_id = event["SecurityGroupId"] + authorized_tcp_ports = set(map(int, event["AuthorizedTcpPorts"])) + authorized_udp_ports = set(map(int, event["AuthorizedUdpPorts"])) + + # Connect to EC2 service + ec2 = connect_to_ec2() + + # Get the security group rules + paginator = ec2.get_paginator("describe_security_group_rules") + + security_group_rules = paginator.paginate( + Filters=[ + { + "Name": "group-id", + "Values": [ + security_group_id, + ], + }, + ], + ) + + # List to return rules that are deleted + rules_deleted = [] + + for page in security_group_rules: + for rule in page["SecurityGroupRules"]: + # Remove TCP ingress rules + if ( + rule["IpProtocol"] == "tcp" + and not rule["IsEgress"] + and check_unauthorized_ports(authorized_tcp_ports, rule) + ): + # Delete the rule + ec2.revoke_security_group_ingress( + GroupId=security_group_id, + SecurityGroupRuleIds=[ + rule["SecurityGroupRuleId"], + ], + ) + # Add rule to list of deleted rules + 
rules_deleted.append(rule["SecurityGroupRuleId"]) + # Remove UDP ingress rules + if ( + rule["IpProtocol"] == "udp" + and not rule["IsEgress"] + and check_unauthorized_ports(authorized_udp_ports, rule) + ): + # Delete the rule + ec2.revoke_security_group_ingress( + GroupId=security_group_id, + SecurityGroupRuleIds=[ + rule["SecurityGroupRuleId"], + ], + ) + # Add rule to list of deleted rules + rules_deleted.append(rule["SecurityGroupRuleId"]) + + return { + "message": "Successfully removed security group rules on " + security_group_id, + "status": "Success", + "rules_deleted": rules_deleted, + } diff --git a/source/remediation_runbooks/scripts/RevokeUnrotatedKeys.py b/source/remediation_runbooks/scripts/RevokeUnrotatedKeys.py index 11dd3cb5..29de75e9 100644 --- a/source/remediation_runbooks/scripts/RevokeUnrotatedKeys.py +++ b/source/remediation_runbooks/scripts/RevokeUnrotatedKeys.py @@ -1,54 +1,73 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -from datetime import datetime, timezone, timedelta +from datetime import datetime, timezone +from typing import TYPE_CHECKING, Dict, List, Literal, TypedDict + import boto3 from botocore.config import Config -boto_config = Config( - retries ={ - 'mode': 'standard' - } -) +if TYPE_CHECKING: + from mypy_boto3_iam.type_defs import EmptyResponseMetadataTypeDef +else: + EmptyResponseMetadataTypeDef = object + +boto_config = Config(retries={"mode": "standard"}) + -responses = {} +class Response(TypedDict): + AccessKeyId: str + Response: EmptyResponseMetadataTypeDef + + +responses: Dict[Literal["DeactivateUnusedKeysResponse"], List[Response]] = {} responses["DeactivateUnusedKeysResponse"] = [] + def connect_to_iam(boto_config): - return boto3.client('iam', config=boto_config) + return boto3.client("iam", config=boto_config) + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + 
def get_user_name(resource_id): config_client = connect_to_config(boto_config) list_discovered_resources_response = config_client.list_discovered_resources( - resourceType='AWS::IAM::User', - resourceIds=[resource_id] + resourceType="AWS::IAM::User", resourceIds=[resource_id] ) - resource_name = list_discovered_resources_response.get("resourceIdentifiers")[0].get("resourceName") + resource_name = list_discovered_resources_response.get("resourceIdentifiers")[ + 0 + ].get("resourceName") return resource_name + def list_access_keys(user_name, include_inactive=False): iam_client = connect_to_iam(boto_config) active_keys = [] keys = iam_client.list_access_keys(UserName=user_name).get("AccessKeyMetadata", []) for key in keys: - if include_inactive or key.get('Status') == 'Active': + if include_inactive or key.get("Status") == "Active": active_keys.append(key) return active_keys + def deactivate_unused_keys(access_keys, max_credential_usage_age, user_name): iam_client = connect_to_iam(boto_config) for key in access_keys: print(key) - last_used = iam_client.get_access_key_last_used(AccessKeyId=key.get("AccessKeyId")).get("AccessKeyLastUsed") + last_used = iam_client.get_access_key_last_used( + AccessKeyId=key.get("AccessKeyId") + ).get("AccessKeyLastUsed") deactivate = False now = datetime.now(timezone.utc) days_since_creation = (now - key.get("CreateDate")).days last_used_days = (now - last_used.get("LastUsedDate", now)).days - print(f'Key {key.get("AccessKeyId")} is {days_since_creation} days old and last used {last_used_days} days ago') + print( + f'Key {key.get("AccessKeyId")} is {days_since_creation} days old and last used {last_used_days} days ago' + ) if days_since_creation > max_credential_usage_age: deactivate = True @@ -59,23 +78,42 @@ def deactivate_unused_keys(access_keys, max_credential_usage_age, user_name): if deactivate: deactivate_key(user_name, key.get("AccessKeyId")) + def deactivate_key(user_name, access_key): iam_client = connect_to_iam(boto_config) - 
responses["DeactivateUnusedKeysResponse"].append({"AccessKeyId": access_key, "Response": iam_client.update_access_key(UserName=user_name, AccessKeyId=access_key, Status="Inactive")}) + responses["DeactivateUnusedKeysResponse"].append( + { + "AccessKeyId": access_key, + "Response": iam_client.update_access_key( + UserName=user_name, AccessKeyId=access_key, Status="Inactive" + ), + } + ) + def verify_expired_credentials_revoked(responses, user_name): if responses.get("DeactivateUnusedKeysResponse"): for key in responses.get("DeactivateUnusedKeysResponse"): - key_data = next(filter(lambda x: x.get("AccessKeyId") == key.get("AccessKeyId"), list_access_keys(user_name, True))) #NOSONAR The value key should change at the next loop iteration as we're cycling through each response. + key_data = next( + filter( + lambda x: x.get("AccessKeyId") == key.get("AccessKeyId"), + list_access_keys(user_name, True), + ) + ) # NOSONAR The value key should change at the next loop iteration as we're cycling through each response. if key_data.get("Status") != "Inactive": - error_message = "VERIFICATION FAILED. ACCESS KEY {} NOT DEACTIVATED".format(key_data.get("AccessKeyId")) + error_message = ( + "VERIFICATION FAILED. 
ACCESS KEY {} NOT DEACTIVATED".format( + key_data.get("AccessKeyId") + ) + ) raise RuntimeError(error_message) return { "output": "Verification of unrotated access keys is successful.", - "http_responses": responses + "http_responses": responses, } + def unrotated_key_handler(event, _): user_name = get_user_name(event.get("IAMResourceId")) max_credential_usage_age = int(event.get("MaxCredentialUsageAge")) diff --git a/source/remediation_runbooks/scripts/SetCloudFrontOriginDomain.py b/source/remediation_runbooks/scripts/SetCloudFrontOriginDomain.py new file mode 100644 index 00000000..d0ed7b57 --- /dev/null +++ b/source/remediation_runbooks/scripts/SetCloudFrontOriginDomain.py @@ -0,0 +1,68 @@ +import boto3 + + +def lambda_handler(event, _): + # Initialize the CloudFront client + cloudfront_client = boto3.client("cloudfront") + + # The ID of the CloudFront distribution you want to update + distribution_id = event["Id"] + + # Intentionally invalid special-use TLD + new_origin_domain = "cloudfront12remediation.example" + + # Get the current distribution configuration + distribution_config = cloudfront_client.get_distribution_config(Id=distribution_id) + + # Update the origin domain in the distribution configuration + distribution_config["DistributionConfig"]["Origins"]["Items"][0][ + "DomainName" + ] = new_origin_domain + + # Check if distribution is enabled and disable it + if distribution_config["DistributionConfig"]["Enabled"]: + distribution_config["DistributionConfig"]["Enabled"] = False + + # If using an S3 origin type, need to update to custom origin type + if ( + "S3OriginConfig" + in distribution_config["DistributionConfig"]["Origins"]["Items"][0] + ): + # Remove S3OriginConfig key + del distribution_config["DistributionConfig"]["Origins"]["Items"][0][ + "S3OriginConfig" + ] + + # Add CustomOriginConfig key + distribution_config["DistributionConfig"]["Origins"]["Items"][0][ + "CustomOriginConfig" + ] = { + "HTTPPort": 80, + "HTTPSPort": 443, + 
"OriginProtocolPolicy": "http-only", + "OriginSslProtocols": {"Quantity": 1, "Items": ["TLSv1.2"]}, + "OriginReadTimeout": 30, + "OriginKeepaliveTimeout": 5, + } + + # Update the distribution configuration + cloudfront_client.update_distribution( + DistributionConfig=distribution_config["DistributionConfig"], + Id=distribution_id, + IfMatch=distribution_config["ETag"], + ) + + updated_distribution = cloudfront_client.get_distribution_config(Id=distribution_id) + updated_origin_domain = updated_distribution["DistributionConfig"]["Origins"][ + "Items" + ][0]["DomainName"] + + if updated_origin_domain == "cloudfront12remediation.example": + return { + "message": "Origin domain updated successfully.", + "status": "Success", + } + else: + raise RuntimeError( + "Failed to update the origin domain. Updated origin domain did not match 'cloudfront12remediation.example'" + ) diff --git a/source/remediation_runbooks/scripts/SetS3LifecyclePolicy.py b/source/remediation_runbooks/scripts/SetS3LifecyclePolicy.py new file mode 100644 index 00000000..503e6f4b --- /dev/null +++ b/source/remediation_runbooks/scripts/SetS3LifecyclePolicy.py @@ -0,0 +1,83 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 + +import boto3 +from botocore.config import Config + +BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + + +def connect_to_s3(): + return boto3.client("s3", config=BOTO_CONFIG) + + +def lambda_handler(event, _): + bucket_name = event["BucketName"] + target_transition_days = event["TargetTransitionDays"] + target_expiration_days = event["TargetExpirationDays"] + target_transition_storage_class = event["TargetTransitionStorageClass"] + rule_id = "S3.13 Remediation Example" + s3 = connect_to_s3() + + lifecycle_policy = {} + if target_expiration_days != 0: + lifecycle_policy = { + "Rules": [ + { + "ID": rule_id, + "Status": "Enabled", + "Expiration": { + "Days": target_expiration_days, + }, + "Transitions": [ + { + "Days": target_transition_days, + "StorageClass": target_transition_storage_class, + }, + ], + "Filter": { + "ObjectSizeGreaterThan": 131072, + }, + }, + ], + } + else: + lifecycle_policy = { + "Rules": [ + { + "ID": rule_id, + "Status": "Enabled", + "Transitions": [ + { + "Days": target_transition_days, + "StorageClass": target_transition_storage_class, + }, + ], + "Filter": { + "ObjectSizeGreaterThan": 131072, + }, + }, + ], + } + + # Set example lifecycle policy + # Moves objects larger than 128 KB to Intelligent Tiering storage class after 30 days + s3.put_bucket_lifecycle_configuration( + Bucket=bucket_name, LifecycleConfiguration=lifecycle_policy + ) + + # Get new lifecycle configuration + lifecycle_config = s3.get_bucket_lifecycle_configuration( + Bucket=bucket_name, + ) + + if lifecycle_config["Rules"][0]["ID"] == rule_id: + return { + "message": "Successfully set example S3 lifecycle policy. Review and update as needed.", + "status": "Success", + } + + else: + raise RuntimeError( + "Failed to set S3 lifecycle policy. 
Lifecycle rule ID did not match 'S3.13 Remediation Example'" + ) diff --git a/source/remediation_runbooks/scripts/SetSSLBucketPolicy.py b/source/remediation_runbooks/scripts/SetSSLBucketPolicy.py index 5fa88a16..e77fbb1d 100644 --- a/source/remediation_runbooks/scripts/SetSSLBucketPolicy.py +++ b/source/remediation_runbooks/scripts/SetSSLBucketPolicy.py @@ -1,19 +1,17 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 import json + import boto3 from botocore.config import Config from botocore.exceptions import ClientError -boto_config = Config( - retries = { - 'mode': 'standard', - 'max_attempts': 10 - } - ) +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + def connect_to_s3(): - return boto3.client('s3', config=boto_config) + return boto3.client("s3", config=boto_config) + def policy_to_add(bucket, partition): return { @@ -21,60 +19,54 @@ def policy_to_add(bucket, partition): "Action": "s3:*", "Effect": "Deny", "Resource": [ - f'arn:{partition}:s3:::{bucket}', - f'arn:{partition}:s3:::{bucket}/*' + f"arn:{partition}:s3:::{bucket}", + f"arn:{partition}:s3:::{bucket}/*", ], - "Condition": { - "Bool": { - "aws:SecureTransport": "false" - } - }, - "Principal": "*" + "Condition": {"Bool": {"aws:SecureTransport": "false"}}, + "Principal": "*", } + + def new_policy(): - return { - "Id": "BucketPolicy", - "Version": "2012-10-17", - "Statement": [] - } + return {"Id": "BucketPolicy", "Version": "2012-10-17", "Statement": []} + def add_ssl_bucket_policy(event, _): - bucket_name = event['bucket'] - account_id = event['accountid'] - aws_partition = event['partition'] + bucket_name = event["bucket"] + account_id = event["accountid"] + aws_partition = event["partition"] s3 = connect_to_s3() bucket_policy = {} try: existing_policy = s3.get_bucket_policy( - Bucket=bucket_name, - ExpectedBucketOwner=account_id + Bucket=bucket_name, ExpectedBucketOwner=account_id ) - bucket_policy = 
json.loads(existing_policy['Policy']) + bucket_policy = json.loads(existing_policy["Policy"]) except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] # delivery channel already exists - return if exception_type not in ["NoSuchBucketPolicy"]: - exit(f'ERROR: Boto3 s3 ClientError: {exception_type} - {str(ex)}') + exit(f"ERROR: Boto3 s3 ClientError: {exception_type} - {str(ex)}") except Exception as e: - exit(f'ERROR getting bucket policy for {bucket_name}: {str(e)}') + exit(f"ERROR getting bucket policy for {bucket_name}: {str(e)}") if not bucket_policy: bucket_policy = new_policy() - print(f'Existing policy: {bucket_policy}') - bucket_policy['Statement'].append(policy_to_add(bucket_name, aws_partition)) + print(f"Existing policy: {bucket_policy}") + bucket_policy["Statement"].append(policy_to_add(bucket_name, aws_partition)) try: result = s3.put_bucket_policy( Bucket=bucket_name, Policy=json.dumps(bucket_policy, indent=4, default=str), - ExpectedBucketOwner=account_id + ExpectedBucketOwner=account_id, ) print(result) except ClientError as ex: - exception_type = ex.response['Error']['Code'] - exit(f'ERROR: Boto3 s3 ClientError: {exception_type} - {str(ex)}') + exception_type = ex.response["Error"]["Code"] + exit(f"ERROR: Boto3 s3 ClientError: {exception_type} - {str(ex)}") except Exception as e: - exit(f'ERROR putting bucket policy for {bucket_name}: {str(e)}') + exit(f"ERROR putting bucket policy for {bucket_name}: {str(e)}") - print(f'New policy: {bucket_policy}') + print(f"New policy: {bucket_policy}") diff --git a/source/remediation_runbooks/scripts/UpdateSecretRotationPeriod.py b/source/remediation_runbooks/scripts/UpdateSecretRotationPeriod.py new file mode 100644 index 00000000..edfb7c11 --- /dev/null +++ b/source/remediation_runbooks/scripts/UpdateSecretRotationPeriod.py @@ -0,0 +1,66 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 + +import boto3 +from botocore.config import Config + +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + + +def connect_to_secretsmanager(): + return boto3.client("secretsmanager", config=boto_config) + + +def lambda_handler(event, _): + secret_arn = event["SecretARN"] + max_days_since_rotation = event["MaxDaysSinceRotation"] + + secretsmanager = connect_to_secretsmanager() + + try: + # Rotate secret and set rotation schedule + secretsmanager.rotate_secret( + SecretId=secret_arn, + RotationRules={ + "AutomaticallyAfterDays": max_days_since_rotation, + }, + RotateImmediately=True, + ) + + # Verify secret rotation schedule updated. + response = secretsmanager.describe_secret(SecretId=secret_arn) + + if "RotationRules" in response: + if ( + response["RotationRules"]["AutomaticallyAfterDays"] + <= max_days_since_rotation + ): + return { + "message": f"Rotated secret and set rotation schedule to {max_days_since_rotation} days.", + "status": "Success", + } + else: + return { + "message": "Failed to rotate secret and set rotation schedule.", + "status": "Failed", + } + + # If secret was already rotated, an exception will be thrown. + except Exception as e: + # Verify secret rotation schedule updated. + response = secretsmanager.describe_secret(SecretId=secret_arn) + + if "RotationRules" in response: + if ( + response["RotationRules"]["AutomaticallyAfterDays"] + <= max_days_since_rotation + ): + return { + "message": f"Set rotation schedule to {max_days_since_rotation} days. 
Secret is already being rotated.", + "status": "Success", + } + else: + return { + "message": f"Failed to rotate secret and set rotation schedule: {str(e)}", + "status": "Failed", + } diff --git a/source/remediation_runbooks/scripts/block_ssm_doc_public_access.py b/source/remediation_runbooks/scripts/block_ssm_doc_public_access.py new file mode 100644 index 00000000..f27388ff --- /dev/null +++ b/source/remediation_runbooks/scripts/block_ssm_doc_public_access.py @@ -0,0 +1,66 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +from typing import TypedDict + +import boto3 +from botocore.config import Config + + +class EventType(TypedDict): + accountid: str + name: str + + +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + + +def connect_to_ssm(): + return boto3.client("ssm", config=boto_config) + + +def lambda_handler(event: EventType, _): + """ + remediates SSM.4 by disabling public access to SSM documents + On success returns True + On failure returns NoneType + """ + + try: + document_arn = event["document_arn"] + document_name = document_arn.split("/")[1] + document_perimissions = describe_document_permissions(document_name) + if "all" in document_perimissions.get("AccountIds"): + modify_document_permissions(document_name) + else: + exit(f"No change was made to {document_name}") + + verify_document_permissions = describe_document_permissions(document_name) + + if "all" not in verify_document_permissions.get("AccountIds"): + return {"isPublic": "False"} + else: + raise RuntimeError + + except Exception as e: + exit(f"Failed to retrieve the SSM Document permission: {str(e)}") + + +def describe_document_permissions(document_name): + ssm_client = connect_to_ssm() + try: + document_permissions = ssm_client.describe_document_permission( + Name=document_name, PermissionType="Share" + ) + return document_permissions + except Exception as e: + exit(f"Failed to describe SSM Document 
{document_name}: {str(e)}") + + +def modify_document_permissions(document_name): + ssm_client = connect_to_ssm() + try: + ssm_client.modify_document_permission( + Name=document_name, AccountIdsToRemove=["all"], PermissionType="Share" + ) + except Exception as e: + exit(f"Failed to modify SSM Document {document_name}: {str(e)}") diff --git a/source/remediation_runbooks/scripts/configure_stack_notifications.py b/source/remediation_runbooks/scripts/configure_stack_notifications.py index e1a99bf6..7cc3033c 100644 --- a/source/remediation_runbooks/scripts/configure_stack_notifications.py +++ b/source/remediation_runbooks/scripts/configure_stack_notifications.py @@ -4,12 +4,19 @@ Configure a CloudFormation stack with an SNS topic for notifications, creating the topic if it does not already exist """ +from time import sleep, time +from typing import TYPE_CHECKING -from time import time, sleep import boto3 from botocore.config import Config -boto_config = Config(retries={ 'mode': 'standard' }) +if TYPE_CHECKING: + from mypy_boto3_sns.client import SNSClient +else: + SNSClient = object + +boto_config = Config(retries={"mode": "standard"}) + def lambda_handler(event, _): """ @@ -22,65 +29,70 @@ def lambda_handler(event, _): `context` is ignored """ - stack_arn = event['stack_arn'] - topic_name = event['topic_name'] + stack_arn = event["stack_arn"] + topic_name = event["topic_name"] topic_arn = get_or_create_topic(topic_name) configure_notifications(stack_arn, topic_arn) wait_for_update(stack_arn) return assert_stack_configured(stack_arn, topic_arn) -def get_or_create_topic(topic_name: str): + +def get_or_create_topic(topic_name: str) -> str: """Get the SQS topic arn for the given topic name, creating it if it does not already exist""" - sns = boto3.client('sns', config=boto_config) + sns: SNSClient = boto3.client("sns", config=boto_config) response = sns.create_topic(Name=topic_name) - return response['TopicArn'] + return response["TopicArn"] -def 
configure_notifications(stack_arn: str, topic_arn: str): + +def configure_notifications(stack_arn: str, topic_arn: str) -> None: """Configure the stack with ARN `stack_arn` to notify the queue with ARN `topic_arn`""" - cloudformation = boto3.resource('cloudformation', config=boto_config) + cloudformation = boto3.resource("cloudformation", config=boto_config) stack = cloudformation.Stack(stack_arn) - kwargs = { - 'UsePreviousTemplate': True, - 'NotificationARNs': [topic_arn]} + kwargs = {"UsePreviousTemplate": True, "NotificationARNs": [topic_arn]} if stack.parameters: - kwargs['Parameters'] = [{ - 'ParameterKey': param['ParameterKey'], - 'UsePreviousValue': True - } for param in stack.parameters] + kwargs["Parameters"] = [ + {"ParameterKey": param["ParameterKey"], "UsePreviousValue": True} + for param in stack.parameters + ] if stack.capabilities: - kwargs['Capabilities'] = stack.capabilities + kwargs["Capabilities"] = stack.capabilities stack.update(**kwargs) + class UpdateTimeoutException(Exception): """Timed out waiting for the CloudFormation stack to update""" -def wait_for_update(stack_arn: str): + +def wait_for_update(stack_arn: str) -> None: """Wait for the stack with ARN `stack_arn` to be in status `UPDATE_COMPLETE`""" wait_interval_seconds = 10 timeout_seconds = 300 start = time() - while get_stack_status(stack_arn) != 'UPDATE_COMPLETE': + while get_stack_status(stack_arn) != "UPDATE_COMPLETE": if time() - start > timeout_seconds: - raise UpdateTimeoutException('Timed out waiting for stack update') + raise UpdateTimeoutException("Timed out waiting for stack update") wait_seconds(wait_interval_seconds) wait_interval_seconds = wait_interval_seconds * 2 + def get_stack_status(stack_arn): """Get the status of the CloudFormation stack with ARN `stack_arn`""" - cloudformation = boto3.client('cloudformation', config=boto_config) + cloudformation = boto3.client("cloudformation", config=boto_config) response = cloudformation.describe_stacks(StackName=stack_arn) - 
return response['Stacks'][0]['StackStatus'] + return response["Stacks"][0]["StackStatus"] + def wait_seconds(seconds): """Wait for `seconds` seconds""" sleep(seconds) + def assert_stack_configured(stack_arn, topic_arn): """ Verify that the CloudFormation stack with ARN `stack_arn` is configured to update the SQS topic with ARN `topic_arn` """ - cloudformation = boto3.resource('cloudformation', config=boto_config) + cloudformation = boto3.resource("cloudformation", config=boto_config) stack = cloudformation.Stack(stack_arn) wait_interval_seconds = 10 timeout_seconds = 300 @@ -88,11 +100,13 @@ def assert_stack_configured(stack_arn, topic_arn): while stack.notification_arns != [topic_arn]: if time() - start > timeout_seconds: raise StackConfigurationFailedException( - 'Timed out waiting for stack configuration to take effect') + "Timed out waiting for stack configuration to take effect" + ) wait_seconds(wait_interval_seconds) wait_interval_seconds = wait_interval_seconds * 2 stack.reload() - return { 'NotificationARNs': stack.notification_arns } + return {"NotificationARNs": stack.notification_arns} + class StackConfigurationFailedException(Exception): """An error occurred updating the CloudFormation stack to notify the SQS topic""" diff --git a/source/remediation_runbooks/scripts/disable_publicip_auto_assign.py b/source/remediation_runbooks/scripts/disable_publicip_auto_assign.py index b2e3a76c..a8714766 100644 --- a/source/remediation_runbooks/scripts/disable_publicip_auto_assign.py +++ b/source/remediation_runbooks/scripts/disable_publicip_auto_assign.py @@ -2,59 +2,56 @@ # SPDX-License-Identifier: Apache-2.0 import boto3 from botocore.config import Config -from botocore.exceptions import ClientError - -boto_config = Config( - retries = { - 'mode': 'standard', - 'max_attempts': 10 - } - ) - - - + +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + + def connect_to_ec2(): - return boto3.client('ec2', config=boto_config) - + return 
boto3.client("ec2", config=boto_config) + + def lambda_handler(event, _): - """ - Disable public IP auto assignment on a subnet. - - `event` should have the following keys and values: - `subnet_arn`: the ARN of the subnet that has public IP auto assignment enabled. - - `context` is ignored - """ - - subnet_arn = event['subnet_arn'] - - subnet_id = subnet_arn.split('/')[1] - - disable_publicip_auto_assign(subnet_id) - - subnet_attributes = describe_subnet(subnet_id) - - public_ip_on_launch = subnet_attributes['Subnets'][0]['MapPublicIpOnLaunch'] - - if public_ip_on_launch == False: - return { - "MapPublicIpOnLaunch": public_ip_on_launch - } - - raise RuntimeError(f'ASR Remediation failed - {subnet_id} did not have public IP auto assignment turned off.') - - + """ + Disable public IP auto assignment on a subnet. + + `event` should have the following keys and values: + `subnet_arn`: the ARN of the subnet that has public IP auto assignment enabled. + + `context` is ignored + """ + + subnet_arn = event["subnet_arn"] + + subnet_id = subnet_arn.split("/")[1] + + disable_publicip_auto_assign(subnet_id) + + subnet_attributes = describe_subnet(subnet_id) + + public_ip_on_launch = subnet_attributes["Subnets"][0]["MapPublicIpOnLaunch"] + + if public_ip_on_launch is False: + return {"MapPublicIpOnLaunch": public_ip_on_launch} + + raise RuntimeError( + f"ASR Remediation failed - {subnet_id} did not have public IP auto assignment turned off." 
+ ) + + def disable_publicip_auto_assign(subnet_id): - """ - Disables public IP Auto Assign on the subnet `subnet_id` - """ - ec2 = connect_to_ec2() - try: - ec2.modify_subnet_attribute(MapPublicIpOnLaunch={'Value':False},SubnetId=subnet_id) - - except Exception as e: - exit(f'There was an error turning off public IP auto assignment: '+str(e)) - + """ + Disables public IP Auto Assign on the subnet `subnet_id` + """ + ec2 = connect_to_ec2() + try: + ec2.modify_subnet_attribute( + MapPublicIpOnLaunch={"Value": False}, SubnetId=subnet_id + ) + + except Exception as e: + exit("There was an error turning off public IP auto assignment: " + str(e)) + + def describe_subnet(subnet_id): """ Grabs Subnet Attributes to verify subnet values were set as expected. @@ -63,6 +60,6 @@ def describe_subnet(subnet_id): try: subnet_attributes = ec2.describe_subnets(SubnetIds=[subnet_id]) return subnet_attributes - + except Exception as e: - exit(f'Failed to get attributes of subnet: '+str(e)) \ No newline at end of file + exit("Failed to get attributes of subnet: " + str(e)) diff --git a/source/remediation_runbooks/scripts/enable_bucket_event_notifications.py b/source/remediation_runbooks/scripts/enable_bucket_event_notifications.py new file mode 100644 index 00000000..8b8229c1 --- /dev/null +++ b/source/remediation_runbooks/scripts/enable_bucket_event_notifications.py @@ -0,0 +1,147 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +""" +Configure a CloudFormation stack with an SNS topic for notifications, creating the topic if it does +not already exist +""" +import json +from typing import TYPE_CHECKING, List + +import boto3 +from botocore.config import Config +from botocore.exceptions import UnknownRegionError + +if TYPE_CHECKING: + from mypy_boto3_sns.client import SNSClient +else: + SNSClient = object + +boto_config = Config(retries={"mode": "standard"}) + + +def lambda_handler(event, _): + """ + Configure a bucket with an SNS topic for notifications, + creating the topic if it does not already exist + + `event` should have the following keys and values: + `bucket_name`: the ARN of the CloudFormation stack to be updated + `topic_name`: the name of the SQS Queue to create and configure for notifications + `account_id`: account id that contains the bucket that will have event notifications configured + `event_types`: the list of events that will have notifications alerted on. 
+ + `context` is ignored + """ + bucket_name = event["bucket_name"] + topic_name = event["topic_name"] + account_id = event["account_id"] + event_types = event["event_types"] + topic_arn = get_or_create_topic(topic_name, bucket_name, account_id) + configure_notifications(bucket_name, topic_arn, event_types) + return assert_bucket_notifcations_configured(bucket_name, account_id) + + +def partition_from_region(session: boto3.session.Session): + """ + returns the partition for a given region + On success returns a string + On failure returns aws + """ + try: + partition = session.get_partition_for_region(session.region_name) + except UnknownRegionError: + return "aws" + + return partition + + +def get_or_create_topic(topic_name: str, bucket_name: str, account_id: str) -> str: + """Get the SNS topic arn that will be used to configure notifications, creating it if it does not already exist""" + sns: SNSClient = boto3.client("sns", config=boto_config) + # get partition and region to buildArn here, replace sourceArn under condition + session = boto3.session.Session() + region = session.region_name + partition = partition_from_region(session) + expected_topic_arn = f"arn:{partition}:sns:{region}:{account_id}:{topic_name}" + policy = { + "Version": "2012-10-17", + "Id": "ASR Notification Policy", + "Statement": [ + { + "Sid": bucket_name + " ASR Notification Policy", + "Effect": "Allow", + "Principal": {"Service": "s3.amazonaws.com"}, + "Action": ["SNS:Publish"], + "Resource": expected_topic_arn, + "Condition": { + "ArnLike": { + "aws:SourceArn": [f"arn:{partition}:s3:::" + bucket_name] + }, + "StringEquals": {"aws:SourceAccount": [account_id]}, + }, + } + ], + } + + try: + topic_attributes = sns.get_topic_attributes(TopicArn=expected_topic_arn) + topic_attributes_policy = topic_attributes["Attributes"]["Policy"] # str + topic_attributes_policy_dict = json.loads(topic_attributes_policy) # dict + for statement in topic_attributes_policy_dict["Statement"]: + if 
statement["Sid"] == bucket_name + " ASR Notification Policy": + return expected_topic_arn + topic_attributes_policy_dict["Statement"].append(policy["Statement"][0]) + new_topic_attributes_policy = json.dumps(topic_attributes_policy_dict) + response = sns.set_topic_attributes( + TopicArn=expected_topic_arn, + AttributeName="Policy", + AttributeValue=new_topic_attributes_policy, + ) + return expected_topic_arn + except Exception: + string_policy = json.dumps(policy) + response = sns.create_topic( + Name=topic_name, + Attributes={"Policy": string_policy}, + ) + return response["TopicArn"] + + +def configure_notifications( + bucket_name: str, topic_arn: str, event_types: List[str] +) -> None: + """Configure the bucket `bucket_name` to notify the sns topic with ARN `topic_arn`""" + s3 = boto3.client("s3", config=boto_config) + s3.put_bucket_notification_configuration( + Bucket=bucket_name, + NotificationConfiguration={ + "TopicConfigurations": [ + { + "Id": "ASR Bucket Notification Topic Config", + "Events": event_types, + "TopicArn": topic_arn, + } + ] + }, + ) + + +def assert_bucket_notifcations_configured(bucket_name, account_id): + """ + Verify that the bucket `bucket_name` is configured to update the SNS topic + with ARN `topic_arn` + """ + s3 = boto3.client("s3", config=boto_config) + notification_configuration = s3.get_bucket_notification_configuration( + Bucket=bucket_name, ExpectedBucketOwner=account_id + ) + try: + return { + "NotificationARNs": notification_configuration["TopicConfigurations"][0][ + "TopicArn" + ] + } + except Exception: + raise RuntimeError( + f"ERROR: {bucket_name} was not configured with notifications" + ) diff --git a/source/remediation_runbooks/scripts/enable_cloudfront_default_root_object.py b/source/remediation_runbooks/scripts/enable_cloudfront_default_root_object.py new file mode 100644 index 00000000..d07b68ff --- /dev/null +++ b/source/remediation_runbooks/scripts/enable_cloudfront_default_root_object.py @@ -0,0 +1,43 @@ +import 
datetime +import json + +import boto3 + + +def default(obj): + if isinstance(obj, (datetime.date, datetime.datetime)): + return obj.isoformat() + else: + raise TypeError("Incorrect HTTPResponse format.") + + +def verify_enable_cloudfront_default_root_object( + cloudfront_client, cloudfront_distribution +): + response = cloudfront_client.get_distribution_config(Id=cloudfront_distribution) + if response["DistributionConfig"]["DefaultRootObject"]: + return "Verification of 'EnableCloudFrontDefaultRootObject' is successful." + error = f"VERIFICATION FAILED. DEFAULT ROOT OBJECT FOR AMAZON CLOUDFRONT DISTRIBUTION {cloudfront_distribution} IS NOT SET." + raise RuntimeError(error) + + +def handler(event, _): + cloudfront_client = boto3.client("cloudfront") + cloudfront_distribution_arn = event["cloudfront_distribution"] + cloudfront_distribution_id = cloudfront_distribution_arn.split("/")[1] + response = cloudfront_client.get_distribution_config(Id=cloudfront_distribution_id) + response["DistributionConfig"]["DefaultRootObject"] = event["root_object"] + update_response = cloudfront_client.update_distribution( + DistributionConfig=response["DistributionConfig"], + Id=cloudfront_distribution_id, + IfMatch=response["ETag"], + ) + output = verify_enable_cloudfront_default_root_object( + cloudfront_client, cloudfront_distribution_id + ) + return { + "Output": { + "Message": output, + "HTTPResponse": json.dumps(update_response, default=default), + } + } diff --git a/source/remediation_runbooks/scripts/enable_delivery_status_logging.py b/source/remediation_runbooks/scripts/enable_delivery_status_logging.py index ee5af3b0..db2b7b04 100644 --- a/source/remediation_runbooks/scripts/enable_delivery_status_logging.py +++ b/source/remediation_runbooks/scripts/enable_delivery_status_logging.py @@ -2,29 +2,25 @@ # SPDX-License-Identifier: Apache-2.0 import boto3 from botocore.config import Config -from botocore.exceptions import ClientError -boto_config = Config( - retries = { - 'mode': 
'standard', - 'max_attempts': 10 - } - ) +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + +endpointTypes = ["HTTP", "Firehose", "Lambda", "Application", "SQS"] -endpointTypes = ['HTTP', 'Firehose', 'Lambda', 'Application', 'SQS'] def connect_to_sns(): - return boto3.client('sns', config=boto_config) + return boto3.client("sns", config=boto_config) + def lambda_handler(event, _): """ - Enable delivery status logging on a SNS topic - - `event` should have the following keys and values: - `logging_role`: the ARN of the IAM Role used to log successful and failed deliveries - `topic_arn`: the arn of the SNS Topic to enable delivery status logging on - - `context` is ignored + Enable delivery status logging on a SNS topic + + `event` should have the following keys and values: + `logging_role`: the ARN of the IAM Role used to log successful and failed deliveries + `topic_arn`: the arn of the SNS Topic to enable delivery status logging on + + `context` is ignored """ logging_role = event["logging_role"] @@ -37,49 +33,94 @@ def lambda_handler(event, _): topic_attributes = get_topic_attributes(topic_arn) return { - "HTTPFailureFeedbackRoleArn": topic_attributes["Attributes"]["HTTPFailureFeedbackRoleArn"], - "HTTPSuccessFeedbackRoleArn": topic_attributes["Attributes"]["HTTPSuccessFeedbackRoleArn"], - "HTTPSuccessFeedbackSampleRate": topic_attributes["Attributes"]["HTTPSuccessFeedbackSampleRate"], - "FirehoseFailureFeedbackRoleArn": topic_attributes["Attributes"]["FirehoseFailureFeedbackRoleArn"], - "FirehoseSuccessFeedbackRoleArn": topic_attributes["Attributes"]["FirehoseSuccessFeedbackRoleArn"], - "FirehoseSuccessFeedbackSampleRate": topic_attributes["Attributes"]["FirehoseSuccessFeedbackSampleRate"], - "LambdaFailureFeedbackRoleArn": topic_attributes["Attributes"]["LambdaFailureFeedbackRoleArn"], - "LambdaSuccessFeedbackRoleArn": topic_attributes["Attributes"]["LambdaSuccessFeedbackRoleArn"], - "LambdaSuccessFeedbackSampleRate": 
topic_attributes["Attributes"]["LambdaSuccessFeedbackSampleRate"], - "ApplicationFailureFeedbackRoleArn": topic_attributes["Attributes"]["ApplicationFailureFeedbackRoleArn"], - "ApplicationSuccessFeedbackRoleArn": topic_attributes["Attributes"]["ApplicationSuccessFeedbackRoleArn"], - "ApplicationSuccessFeedbackSampleRate": topic_attributes["Attributes"]["ApplicationSuccessFeedbackSampleRate"], - "SQSFailureFeedbackRoleArn": topic_attributes["Attributes"]["SQSFailureFeedbackRoleArn"], - "SQSSuccessFeedbackRoleArn": topic_attributes["Attributes"]["SQSSuccessFeedbackRoleArn"], - "SQSSuccessFeedbackSampleRate": topic_attributes["Attributes"]["SQSSuccessFeedbackSampleRate"] + "HTTPFailureFeedbackRoleArn": topic_attributes["Attributes"][ + "HTTPFailureFeedbackRoleArn" + ], + "HTTPSuccessFeedbackRoleArn": topic_attributes["Attributes"][ + "HTTPSuccessFeedbackRoleArn" + ], + "HTTPSuccessFeedbackSampleRate": topic_attributes["Attributes"][ + "HTTPSuccessFeedbackSampleRate" + ], + "FirehoseFailureFeedbackRoleArn": topic_attributes["Attributes"][ + "FirehoseFailureFeedbackRoleArn" + ], + "FirehoseSuccessFeedbackRoleArn": topic_attributes["Attributes"][ + "FirehoseSuccessFeedbackRoleArn" + ], + "FirehoseSuccessFeedbackSampleRate": topic_attributes["Attributes"][ + "FirehoseSuccessFeedbackSampleRate" + ], + "LambdaFailureFeedbackRoleArn": topic_attributes["Attributes"][ + "LambdaFailureFeedbackRoleArn" + ], + "LambdaSuccessFeedbackRoleArn": topic_attributes["Attributes"][ + "LambdaSuccessFeedbackRoleArn" + ], + "LambdaSuccessFeedbackSampleRate": topic_attributes["Attributes"][ + "LambdaSuccessFeedbackSampleRate" + ], + "ApplicationFailureFeedbackRoleArn": topic_attributes["Attributes"][ + "ApplicationFailureFeedbackRoleArn" + ], + "ApplicationSuccessFeedbackRoleArn": topic_attributes["Attributes"][ + "ApplicationSuccessFeedbackRoleArn" + ], + "ApplicationSuccessFeedbackSampleRate": topic_attributes["Attributes"][ + "ApplicationSuccessFeedbackSampleRate" + ], + 
"SQSFailureFeedbackRoleArn": topic_attributes["Attributes"][ + "SQSFailureFeedbackRoleArn" + ], + "SQSSuccessFeedbackRoleArn": topic_attributes["Attributes"][ + "SQSSuccessFeedbackRoleArn" + ], + "SQSSuccessFeedbackSampleRate": topic_attributes["Attributes"][ + "SQSSuccessFeedbackSampleRate" + ], } + def add_roles_to_topic(logging_role, topic_arn): """ Configures the IAM role `logging_role` that will log successful and failed deliveries to SNS Topic `topic_arn` - """ + """ sns = connect_to_sns() try: for endpoint in endpointTypes: - sns.set_topic_attributes(TopicArn=topic_arn, AttributeName=f'{endpoint}SuccessFeedbackRoleArn', AttributeValue=logging_role) - sns.set_topic_attributes(TopicArn=topic_arn, AttributeName=f'{endpoint}FailureFeedbackRoleArn', AttributeValue=logging_role) + sns.set_topic_attributes( + TopicArn=topic_arn, + AttributeName=f"{endpoint}SuccessFeedbackRoleArn", + AttributeValue=logging_role, + ) + sns.set_topic_attributes( + TopicArn=topic_arn, + AttributeName=f"{endpoint}FailureFeedbackRoleArn", + AttributeValue=logging_role, + ) except Exception as e: reset_to_recognized_state(topic_arn) - exit(f'Failed to set success/failure role of topic {topic_arn}: {str(e)}') + exit(f"Failed to set success/failure role of topic {topic_arn}: {str(e)}") + def add_sample_rate_to_topic(topic_arn, sample_rate): """ Configures the Success sample rate, the percentage of successful messages for which you want to receive CloudWatch Logs. 
- """ + """ sns = connect_to_sns() try: for endpoint in endpointTypes: - sns.set_topic_attributes(TopicArn=topic_arn, AttributeName=f'{endpoint}SuccessFeedbackSampleRate', AttributeValue=sample_rate) + sns.set_topic_attributes( + TopicArn=topic_arn, + AttributeName=f"{endpoint}SuccessFeedbackSampleRate", + AttributeValue=sample_rate, + ) except Exception as e: reset_to_recognized_state(topic_arn) - exit(f'Failed to set success sample rate of SNS topic {topic_arn}: {str(e)}') + exit(f"Failed to set success sample rate of SNS topic {topic_arn}: {str(e)}") + def get_topic_attributes(topic_arn): """ @@ -91,7 +132,8 @@ def get_topic_attributes(topic_arn): return topic_attributes except Exception as e: - exit(f'Failed to get attributes of SNS topic {topic_arn}: {str(e)}') + exit(f"Failed to get attributes of SNS topic {topic_arn}: {str(e)}") + def reset_to_recognized_state(topic_arn): """ @@ -100,7 +142,17 @@ def reset_to_recognized_state(topic_arn): sns = connect_to_sns() for endpoint in endpointTypes: try: - sns.set_topic_attributes(TopicArn=topic_arn, AttributeName=f'{endpoint}SuccessFeedbackRoleArn', AttributeValue='') - sns.set_topic_attributes(TopicArn=topic_arn, AttributeName=f'{endpoint}FailureFeedbackRoleArn', AttributeValue='') + sns.set_topic_attributes( + TopicArn=topic_arn, + AttributeName=f"{endpoint}SuccessFeedbackRoleArn", + AttributeValue="", + ) + sns.set_topic_attributes( + TopicArn=topic_arn, + AttributeName=f"{endpoint}FailureFeedbackRoleArn", + AttributeValue="", + ) except Exception: - print(f'There was an error while resetting SNS Topic {topic_arn}, please manually turn off delivery status logging for protocol {endpoint}') + print( + f"There was an error while resetting SNS Topic {topic_arn}, please manually turn off delivery status logging for protocol {endpoint}" + ) diff --git a/source/remediation_runbooks/scripts/enable_imds_v2_on_instance.py b/source/remediation_runbooks/scripts/enable_imds_v2_on_instance.py new file mode 100644 index 
00000000..0b726693 --- /dev/null +++ b/source/remediation_runbooks/scripts/enable_imds_v2_on_instance.py @@ -0,0 +1,67 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import boto3 +from botocore.config import Config + +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + + +def connect_to_ec2(): + return boto3.client("ec2", config=boto_config) + + +def lambda_handler(event, _): + """ + Enable IMDSv2 on an EC2 Instance. + + `event` should have the following keys and values: + `instance_arn`: the ARN of the instance that does not have IMDSv2 enabled. + + `context` is ignored + """ + + instance_arn = event["instance_arn"] + + instance_id = instance_arn.split("/")[1] + + enable_imdsv2(instance_id) + + instance_attributes = describe_instance(instance_id) + + imds_v2_attribute = instance_attributes["Reservations"][0]["Instances"][0][ + "MetadataOptions" + ] + + if imds_v2_attribute["HttpTokens"] == "required": + return imds_v2_attribute + + raise RuntimeError( + f"ASR Remediation failed - {instance_id} did not have IMDSv2 enabled." + ) + + +def enable_imdsv2(instance_id): + """ + Changes EC2 Instance metadata options to require IMDSv2 + """ + ec2 = connect_to_ec2() + try: + ec2.modify_instance_metadata_options( + InstanceId=instance_id, HttpTokens="required", HttpEndpoint="enabled" + ) + + except Exception as e: + exit("There was an error enabling IMDSv2: " + str(e)) + + +def describe_instance(instance_id): + """ + Grabs Instance Attributes to verify IMDSv2 values were set as expected. 
+ """ + ec2 = connect_to_ec2() + try: + instance_attributes = ec2.describe_instances(InstanceIds=[instance_id]) + return instance_attributes + + except Exception as e: + exit("Failed to get attributes of instance: " + str(e)) diff --git a/source/remediation_runbooks/scripts/enable_minor_version_upgrade_rds.py b/source/remediation_runbooks/scripts/enable_minor_version_upgrade_rds.py index 4ae24f05..c4577d9c 100644 --- a/source/remediation_runbooks/scripts/enable_minor_version_upgrade_rds.py +++ b/source/remediation_runbooks/scripts/enable_minor_version_upgrade_rds.py @@ -3,26 +3,23 @@ import boto3 from botocore.config import Config -boto_config = Config( - retries = { - 'mode': 'standard', - 'max_attempts': 10 - } - ) +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + +multi_az_cluster_engines = ["mysql", "postgres"] -multi_az_cluster_engines = ["mysql","postgres"] def connect_to_rds(): - return boto3.client('rds', config=boto_config) + return boto3.client("rds", config=boto_config) + def lambda_handler(event, _): """ - Enable auto minor version upgrades on an instance or a Multi-AZ Cluster - - `event` should have the following keys and values: - `DBInstanceIdentifier`: The identifier of the database instance that is to be modified. - - `context` is ignored + Enable auto minor version upgrades on an instance or a Multi-AZ Cluster + + `event` should have the following keys and values: + `DBInstanceIdentifier`: The identifier of the database instance that is to be modified. 
+ + `context` is ignored """ db_instance_id = event["DBInstanceIdentifier"] @@ -30,12 +27,12 @@ def lambda_handler(event, _): found_instance = rds.describe_db_instances(DBInstanceIdentifier=db_instance_id) - instance_info = found_instance['DBInstances'][0] + instance_info = found_instance["DBInstances"][0] response = False - if ("DBClusterIdentifier" in instance_info.keys()): - if (multi_az_check(instance_info["DBClusterIdentifier"])): + if "DBClusterIdentifier" in instance_info.keys(): + if multi_az_check(instance_info["DBClusterIdentifier"]): cluster_id = instance_info["DBClusterIdentifier"] enable_minor_version_upgrade_cluster(cluster_id) response = verify_cluster_changes(cluster_id) @@ -45,76 +42,88 @@ def lambda_handler(event, _): else: enable_minor_version_upgrade_instance(db_instance_id) response = verify_instance_changes(db_instance_id) - - if response == True: - return { - "AutoMinorVersionUpgrade": response - } - - raise RuntimeError(f'ASR Remediation failed - {db_instance_id} did not have enable auto minor version upgrades enabled.') + + if response is True: + return {"AutoMinorVersionUpgrade": response} + + raise RuntimeError( + f"ASR Remediation failed - {db_instance_id} did not have enable auto minor version upgrades enabled." + ) + def multi_az_check(cluster_id): """ Checks to see if the cluster is Multi-AZ. Instances within clusters that match this check are not able to be modified. - """ + """ rds = connect_to_rds() try: found_cluster = rds.describe_db_clusters(DBClusterIdentifier=cluster_id) - cluster_info = found_cluster['DBClusters'][0] + cluster_info = found_cluster["DBClusters"][0] except Exception as e: - exit(f'Failed to get information about the cluster: {cluster_id}. Error: {e}') + exit(f"Failed to get information about the cluster: {cluster_id}. 
Error: {e}") - return ((cluster_info["MultiAZ"] == True) and (cluster_info["Engine"] in multi_az_cluster_engines)) + return (cluster_info["MultiAZ"] is True) and ( + cluster_info["Engine"] in multi_az_cluster_engines + ) def enable_minor_version_upgrade_cluster(cluster_id): """ Enables automatic minor version upgrade for a Multi-AZ Cluster. - """ + """ rds = connect_to_rds() try: - rds.modify_db_cluster(DBClusterIdentifier=cluster_id,AutoMinorVersionUpgrade=True) + rds.modify_db_cluster( + DBClusterIdentifier=cluster_id, AutoMinorVersionUpgrade=True + ) except Exception as e: - exit(f'Failed to modify the cluster: {cluster_id}. Error: {e}') + exit(f"Failed to modify the cluster: {cluster_id}. Error: {e}") + def enable_minor_version_upgrade_instance(instance_id): """ Enables automatic minor version upgrade for an instance. - """ + """ rds = connect_to_rds() try: - rds.modify_db_instance(DBInstanceIdentifier=instance_id,AutoMinorVersionUpgrade=True) + rds.modify_db_instance( + DBInstanceIdentifier=instance_id, AutoMinorVersionUpgrade=True + ) except Exception as e: - exit(f'Failed to modify the instance: {instance_id}. Error: {e}') + exit(f"Failed to modify the instance: {instance_id}. Error: {e}") + def verify_cluster_changes(cluster_id): """ Verifies automatic minor version upgrade for a Multi-AZ cluster. - """ + """ rds = connect_to_rds() try: - found_cluster = rds.describe_db_clusters(DBClusterIdentifier=cluster_id, MaxRecords=100) - cluster_info = found_cluster['DBClusters'][0] + found_cluster = rds.describe_db_clusters( + DBClusterIdentifier=cluster_id, MaxRecords=100 + ) + cluster_info = found_cluster["DBClusters"][0] except Exception as e: - exit(f'Failed to verify cluster changes: {cluster_id}. Error: {e}') + exit(f"Failed to verify cluster changes: {cluster_id}. 
Error: {e}") + + return cluster_info["AutoMinorVersionUpgrade"] + - return cluster_info['AutoMinorVersionUpgrade'] - def verify_instance_changes(instance_id): """ Verifies automatic minor version upgrade for an instance. - """ + """ rds = connect_to_rds() try: - found_instance = rds.describe_db_instances(DBInstanceIdentifier=instance_id, MaxRecords=100) - instance_info = found_instance['DBInstances'][0] + found_instance = rds.describe_db_instances( + DBInstanceIdentifier=instance_id, MaxRecords=100 + ) + instance_info = found_instance["DBInstances"][0] except Exception as e: + exit(f"Failed to verify instance changes: {instance_id}. Error: {e}") - exit(f'Failed to verify instance changes: {instance_id}. Error: {e}') - - return instance_info['AutoMinorVersionUpgrade'] - + return instance_info["AutoMinorVersionUpgrade"] diff --git a/source/remediation_runbooks/scripts/remove_codebuild_privileged_mode.py b/source/remediation_runbooks/scripts/remove_codebuild_privileged_mode.py new file mode 100644 index 00000000..072601f7 --- /dev/null +++ b/source/remediation_runbooks/scripts/remove_codebuild_privileged_mode.py @@ -0,0 +1,68 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import boto3 +from botocore.config import Config + +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + + +def connect_to_codebuild(): + return boto3.client("codebuild", config=boto_config) + + +def lambda_handler(event, _): + """ + Removes CodeBuild privileged mode from a project. + + `event` should have the following keys and values: + `project_name`: the name of the codebuild project with privileged mode enabled. 
+ + `context` is ignored + """ + project_name = event["project_name"] + + project_attributes = get_project_info(project_name) + + initial_environment = project_attributes["projects"][0]["environment"] + + initial_environment["privilegedMode"] = False + + remove_privileged_mode(project_name, initial_environment) + + updated_project_attributes = get_project_info(project_name) + + privileged_status = updated_project_attributes["projects"][0]["environment"][ + "privilegedMode" + ] + + if privileged_status is False: + return {"privilegedMode": privileged_status} + + raise RuntimeError( + f"ASR Remediation failed - {project_name} did not have privileged mode removed from project." + ) + + +def remove_privileged_mode(project_name, environment): + """ + Removes privileged_status from CodeBuild Project + """ + codebuild = connect_to_codebuild() + try: + codebuild.update_project(name=project_name, environment=environment) + + except Exception as e: + exit("There was an error updating codebuild project: " + str(e)) + + +def get_project_info(project_name): + """ + Gets CodeBuild Project info + """ + codebuild = connect_to_codebuild() + try: + project_attributes = codebuild.batch_get_projects(names=[project_name]) + return project_attributes + + except Exception as e: + exit("Failed to get attributes of project: " + str(e)) diff --git a/source/remediation_runbooks/scripts/test/conftest.py b/source/remediation_runbooks/scripts/test/conftest.py index 3ea5b014..08432a5a 100644 --- a/source/remediation_runbooks/scripts/test/conftest.py +++ b/source/remediation_runbooks/scripts/test/conftest.py @@ -1,8 +1,10 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import os + import pytest + @pytest.fixture(scope="module", autouse=True) def aws_credentials(): os.environ["AWS_ACCESS_KEY_ID"] = "testing" diff --git a/source/remediation_runbooks/scripts/test/test_DisableTGWAutoAcceptSharedAttachments.py b/source/remediation_runbooks/scripts/test/test_DisableTGWAutoAcceptSharedAttachments.py new file mode 100644 index 00000000..ff81acaf --- /dev/null +++ b/source/remediation_runbooks/scripts/test/test_DisableTGWAutoAcceptSharedAttachments.py @@ -0,0 +1,33 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test the functionality of the `DisableTGWAutoAcceptSharedAttachments` remediation script""" + +import boto3 +from botocore.config import Config +from DisableTGWAutoAcceptSharedAttachments import lambda_handler +from moto import mock_aws + + +@mock_aws +def test_disable_tgw_autoaccept_shared_attachments(): + BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + + ec2 = boto3.client("ec2", config=BOTO_CONFIG) + + # Create new transit gateway with AutoAcceptSharedAttachments enabled + tgw = ec2.create_transit_gateway(Options={"AutoAcceptSharedAttachments": "enable"}) + + # Gather ID of the newly created transit gateway, store in event + tgw_id = tgw["TransitGateway"]["TransitGatewayId"] + event = {"TransitGatewayId": tgw_id} + + # Run remediation to disable AutoAcceptSharedAttachments given the transit gateway ID + lambda_handler(event, {}) + + # Check AutoAcceptSharedAttachments option after remediation is run + tgw_updated = ec2.describe_transit_gateways(TransitGatewayIds=[tgw_id]) + + assert ( + tgw_updated["TransitGateways"][0]["Options"]["AutoAcceptSharedAttachments"] + == "disable" + ) diff --git a/source/remediation_runbooks/scripts/test/test_EnableAutoSecretRotation.py b/source/remediation_runbooks/scripts/test/test_EnableAutoSecretRotation.py new file mode 100644 index 00000000..88d2accf --- /dev/null 
+++ b/source/remediation_runbooks/scripts/test/test_EnableAutoSecretRotation.py @@ -0,0 +1,302 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +import boto3 +import pytest +from botocore.config import Config +from botocore.stub import Stubber +from EnableAutoSecretRotation import lambda_handler as remediation + +BOTO_CONFIG = Config( + retries={"mode": "standard", "max_attempts": 10}, region_name="us-east-1" +) + +# Example name and ARN for the Secrets Manager secret +SECRET_NAME = "test-secret" +SECRET_ARN = ( + f"arn:aws:secretsmanager:us-east-1:123456789012:secret:{SECRET_NAME}-PyhdWC" +) + +# Example name and ARN for IAM role used for Lambda rotation function +ROLE_NAME = "rotation_function_role" +ROLE_ARN = f"arn:aws:iam::123456789012:role/service-role/{ROLE_NAME}" + +# Example role trust policy for the Lambda rotation function +ROLE_TRUST_POLICY = '{ "Version": "2012-10-17", "Statement": [ { "Effect": "Allow", "Principal": { "Service": "lambda.amazonaws.com" }, "Action": "sts:AssumeRole" } ] }' + +# Example name and ARN for Lambda rotation function +FUNCTION_NAME = "rotation_function" +FUNCTION_ARN = f"arn:aws:lambda:us-east-1:123456789012:function:{FUNCTION_NAME}" + + +def add_response_create_secret(stubber): + # Add response to create_secret + stubber.add_response( + "create_secret", + { + "ARN": SECRET_ARN, + "Name": SECRET_NAME, + }, + { + "Name": SECRET_NAME, + "SecretString": "test-secret-value", + }, + ) + + +# ===================================================================================== +# EnableAutoSecretRotation SUCCESS +# ===================================================================================== +def test_enable_rotation_success(mocker): + # Setup client for Secrets Manager + secretsmanager_client = boto3.client("secretsmanager", config=BOTO_CONFIG) + stub_secretsmanager = Stubber(secretsmanager_client) + + # Setup client for IAM + iam_client = boto3.client("iam", 
config=BOTO_CONFIG) + stub_iam = Stubber(iam_client) + + # Setup client for Lambda + lambda_client = boto3.client("lambda", config=BOTO_CONFIG) + stub_lambda = Stubber(lambda_client) + + # Add response to create_secret + add_response_create_secret(stub_secretsmanager) + + # Add response to create IAM execution role for Lambda rotation function + stub_iam.add_response( + "create_role", + { + "Role": { + "Path": "/service-role/", + "RoleName": ROLE_NAME, + "RoleId": "AAAAAAAAAAAAAAAA", + "Arn": ROLE_ARN, + "CreateDate": "2022-01-01T00:00:00Z", + }, + }, + { + "RoleName": ROLE_NAME, + "AssumeRolePolicyDocument": ROLE_TRUST_POLICY, + }, + ) + + # Add response to create fake Lambda rotation function + stub_lambda.add_response( + "create_function", + { + "FunctionName": FUNCTION_NAME, + }, + { + "FunctionName": FUNCTION_NAME, + "Role": ROLE_ARN, + "Runtime": "python3.8", + "Code": {"ZipFile": b"0"}, + }, + ) + + # Add response to setup automatic rotation with fake Lambda function + stub_secretsmanager.add_response( + "rotate_secret", + { + "ARN": SECRET_ARN, + "Name": SECRET_NAME, + }, + { + "SecretId": SECRET_ARN, + "RotationLambdaARN": FUNCTION_ARN, + "RotationRules": { + "AutomaticallyAfterDays": 90, + }, + "RotateImmediately": False, + }, + ) + + # Add response to cancel automatic rotation + stub_secretsmanager.add_response( + "cancel_rotate_secret", + { + "ARN": SECRET_ARN, + }, + { + "SecretId": SECRET_ARN, + }, + ) + + # Add response to describe secret with rotation disabled + stub_secretsmanager.add_response( + "describe_secret", + { + "ARN": SECRET_ARN, + "RotationEnabled": False, + "RotationLambdaARN": FUNCTION_ARN, + "RotationRules": { + "AutomaticallyAfterDays": 90, + }, + }, + { + "SecretId": SECRET_ARN, + }, + ) + + # Add response to set automatic rotation for remediation script + stub_secretsmanager.add_response( + "rotate_secret", + { + "ARN": SECRET_ARN, + "Name": SECRET_NAME, + }, + { + "SecretId": SECRET_ARN, + "RotationRules": { + 
"AutomaticallyAfterDays": 90, + }, + "RotateImmediately": False, + }, + ) + + # Add response to describe secret with rotation enabled for remediation script + stub_secretsmanager.add_response( + "describe_secret", + { + "ARN": SECRET_ARN, + "RotationEnabled": True, + "RotationLambdaARN": FUNCTION_ARN, + "RotationRules": { + "AutomaticallyAfterDays": 90, + }, + }, + { + "SecretId": SECRET_ARN, + }, + ) + + # Activate stubbers + stub_secretsmanager.activate() + stub_iam.activate() + stub_lambda.activate() + + mocker.patch( + "EnableAutoSecretRotation.connect_to_secretsmanager", + return_value=secretsmanager_client, + ) + + # Create test secret without automatic rotation + secretsmanager_client.create_secret( + Name="test-secret", + SecretString="test-secret-value", + ) + + # Create IAM execution role for Lambda rotation function + iam_client.create_role( + RoleName=ROLE_NAME, + AssumeRolePolicyDocument=ROLE_TRUST_POLICY, + ) + + # Create fake Lambda rotation function + lambda_client.create_function( + FunctionName=FUNCTION_NAME, + Role=ROLE_ARN, + Runtime="python3.8", + Code={ + "ZipFile": b"0", + }, + ) + + # Setup automatic rotation with fake Lambda function + secretsmanager_client.rotate_secret( + SecretId=SECRET_ARN, + RotationLambdaARN=FUNCTION_ARN, + RotationRules={ + "AutomaticallyAfterDays": 90, + }, + RotateImmediately=False, + ) + + # Cancel automatic rotation + secretsmanager_client.cancel_rotate_secret(SecretId=SECRET_ARN) + + # Describe secret with rotation disabled + secret = secretsmanager_client.describe_secret(SecretId=SECRET_ARN) + + # Assert automatic rotation is disabled + assert "RotationEnabled" in secret + assert not secret["RotationEnabled"] + + # Run remediation script + event = {"SecretARN": SECRET_ARN, "MaximumAllowedRotationFrequency": 90} + response = remediation(event, {}) + + assert response == { + "message": "Enabled automatic secret rotation every 90 days with previously set rotation function.", + "status": "Success", + } + + 
stub_secretsmanager.deactivate() + stub_iam.deactivate() + stub_lambda.deactivate() + + +# ===================================================================================== +# EnableAutoSecretRotation FAILURE +# ===================================================================================== +def test_enable_rotation_failure(mocker): + secretsmanager_client = boto3.client("secretsmanager", config=BOTO_CONFIG) + stub_secretsmanager = Stubber(secretsmanager_client) + + # Add response to create_secret + add_response_create_secret(stub_secretsmanager) + + # Add response to describe_secret + stub_secretsmanager.add_response( + "describe_secret", + { + "ARN": SECRET_ARN, + "Name": SECRET_NAME, + }, + { + "SecretId": SECRET_ARN, + }, + ) + + # Add error response to describe_secret + stub_secretsmanager.add_client_error( + "describe_secret", + "InvalidRequestException", + "No Lambda rotation function ARN is associated with this secret.", + ) + + # Activate stubber + stub_secretsmanager.activate() + + mocker.patch( + "EnableAutoSecretRotation.connect_to_secretsmanager", + return_value=secretsmanager_client, + ) + + # Create test secret without automatic rotation + secretsmanager_client.create_secret( + Name="test-secret", SecretString="test-secret-value" + ) + + # Describe test secret + secret = secretsmanager_client.describe_secret(SecretId=SECRET_ARN) + + # Assert secret rotation has never been enabled + assert "RotationEnabled" not in secret + + # Run remediation script + event = {"SecretARN": SECRET_ARN, "MaximumAllowedRotationFrequency": 90} + with pytest.raises(Exception) as pytest_wrapped_e: + remediation(event, {}) + + # Assert remediation script fails because no Lambda function was provided. + assert ( + pytest_wrapped_e.type + == secretsmanager_client.exceptions.InvalidRequestException + ) + assert "No Lambda rotation function ARN is associated with this secret." 
in str( + pytest_wrapped_e.value + ) + + stub_secretsmanager.deactivate() diff --git a/source/remediation_runbooks/scripts/test/test_EnableGuardDuty.py b/source/remediation_runbooks/scripts/test/test_EnableGuardDuty.py new file mode 100644 index 00000000..a059a0ee --- /dev/null +++ b/source/remediation_runbooks/scripts/test/test_EnableGuardDuty.py @@ -0,0 +1,44 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +import boto3 +from botocore.config import Config +from EnableGuardDuty import lambda_handler as remediation +from moto import mock_aws + +BOTO_CONFIG = Config( + retries={"mode": "standard", "max_attempts": 10}, region_name="us-east-1" +) + + +# Test 1: Ensure existing GuardDuty detectors are enabled. +@mock_aws +def test_guardduty_enablement(): + guardduty = boto3.client("guardduty", config=BOTO_CONFIG) + # Create GuardDuty detector in disabled state + detector_id = guardduty.create_detector(Enable=False)["DetectorId"] + + # Check GuardDuty detector is disabled + print(guardduty.get_detector(DetectorId=detector_id)["Status"]) + + remediation(_={}, __="") + + # Assert GuardDuty detector is enabled after remediation is run + assert guardduty.get_detector(DetectorId=detector_id)["Status"] == "ENABLED" + + +# Test 2: Ensure a GuardDuty detector is created and enabled. 
+@mock_aws
+def test_create_detector():
+    # Run remediation, without GuardDuty detector created
+    remediation(_={}, __="")
+    guardduty = boto3.client("guardduty", config=BOTO_CONFIG)
+
+    detector_list = guardduty.list_detectors()["DetectorIds"]
+
+    # Assert there is a GuardDuty detector after remediation is run
+    assert detector_list != []
+
+    # Assert that the GuardDuty detector is enabled
+    for detector_id in detector_list:
+        assert guardduty.get_detector(DetectorId=detector_id)["Status"] == "ENABLED"
diff --git a/source/remediation_runbooks/scripts/test/test_EnablePrivateRepositoryScanning.py b/source/remediation_runbooks/scripts/test/test_EnablePrivateRepositoryScanning.py
new file mode 100644
index 00000000..822be576
--- /dev/null
+++ b/source/remediation_runbooks/scripts/test/test_EnablePrivateRepositoryScanning.py
@@ -0,0 +1,36 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+"""Test the functionality of the `EnablePrivateRepositoryScanning` remediation script"""
+
+import boto3
+from botocore.config import Config
+from EnablePrivateRepositoryScanning import lambda_handler
+from moto import mock_aws
+
+
+@mock_aws
+def test_enable_private_repo_scanning():
+    BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10})
+
+    ecr = boto3.client("ecr", config=BOTO_CONFIG)
+
+    # Create private repository with scanning disabled
+    repository = ecr.create_repository(
+        repositoryName="test-ecr.1-repo",
+        imageScanningConfiguration={"scanOnPush": False},
+    )
+
+    # Get the repository name
+    repository_name = repository["repository"]["repositoryName"]
+    event = {"RepositoryName": repository_name}
+
+    # Run remediation script
+    lambda_handler(event, {})
+
+    # Get the updated repository
+    updated_repository = ecr.describe_repositories(repositoryNames=[repository_name])
+
+    # Verify the repository now has scanning enabled
+    assert updated_repository["repositories"][0]["imageScanningConfiguration"][
+        
"scanOnPush" + ] diff --git a/source/remediation_runbooks/scripts/test/test_RemoveUnusedSecret.py b/source/remediation_runbooks/scripts/test/test_RemoveUnusedSecret.py new file mode 100644 index 00000000..8bb30218 --- /dev/null +++ b/source/remediation_runbooks/scripts/test/test_RemoveUnusedSecret.py @@ -0,0 +1,186 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +from datetime import datetime, timedelta, timezone + +import boto3 +import pytest +from botocore.config import Config +from botocore.stub import Stubber +from RemoveUnusedSecret import lambda_handler as remediation + +BOTO_CONFIG = Config( + retries={"mode": "standard", "max_attempts": 10}, region_name="us-east-1" +) + +# Example name and ARN for the Secrets Manager secret +SECRET_NAME = "test-secret" +SECRET_ARN = ( + f"arn:aws:secretsmanager:us-east-1:123456789012:secret:{SECRET_NAME}-PyhdWC" +) + +# Current date in the same format SecretsManager tracks LastAccessedDate +DATE_TODAY = datetime.now().replace( + hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc +) + +# Parameter for SecretsManager.3 control that specifies how many days a secret can be unused +UNUSED_FOR_DAYS = 90 + + +def add_response_create_secret(stubber): + # Add response to create_secret + stubber.add_response( + "create_secret", + { + "ARN": SECRET_ARN, + "Name": SECRET_NAME, + }, + { + "Name": SECRET_NAME, + "SecretString": "test-secret-value", + }, + ) + + +# ===================================================================================== +# RemoveUnusedSecret SUCCESS +# ===================================================================================== +def test_remove_unused_secret_success(mocker): + # Setup client for Secrets Manager + secretsmanager_client = boto3.client("secretsmanager", config=BOTO_CONFIG) + stubber = Stubber(secretsmanager_client) + + # Add response to create_secret + add_response_create_secret(stubber) + + # Add response to 
describe_secret for remediation script with LastAccessedDate > 90 + stubber.add_response( + "describe_secret", + { + "ARN": SECRET_ARN, + "LastAccessedDate": DATE_TODAY - timedelta(days=91), + }, + { + "SecretId": SECRET_ARN, + }, + ) + + # Add response to delete_secret for remediation script + stubber.add_response( + "delete_secret", + { + "DeletionDate": DATE_TODAY, + }, + { + "SecretId": SECRET_ARN, + "RecoveryWindowInDays": 30, + }, + ) + + stubber.add_response( + "describe_secret", + { + "ARN": SECRET_ARN, + "DeletedDate": DATE_TODAY, + }, + { + "SecretId": SECRET_ARN, + }, + ) + + # Activate stubber + stubber.activate() + + mocker.patch( + "RemoveUnusedSecret.connect_to_secretsmanager", + return_value=secretsmanager_client, + ) + + # Create test secret + secret = secretsmanager_client.create_secret( + Name="test-secret", SecretString="test-secret-value" + ) + + # Extract the ARN of the secret from response + secret_arn = secret["ARN"] + + event = { + "SecretARN": secret_arn, + "UnusedForDays": UNUSED_FOR_DAYS, + } + + # Execute the remediation script + response = remediation(event, {}) + + # Assert te remediation successfully deleted the secret + assert response["status"] == "Success" + + # Describe the secret pending deletion + deleted_secret = secretsmanager_client.describe_secret(SecretId=secret_arn) + + # Assert the secret is scheduled for deletion + assert "DeletedDate" in deleted_secret + + # Deactivate stubber + stubber.deactivate() + + +# ===================================================================================== +# RemoveUnusedSecret FAILURE +# If secret has been accessed within the days specified by UNUSED_FOR_DAYS +# ===================================================================================== +def test_remove_unused_secret_failure(mocker): + # Setup client for Secrets Manager + secretsmanager_client = boto3.client("secretsmanager", config=BOTO_CONFIG) + stubber = Stubber(secretsmanager_client) + + # Add response to create_secret 
+ add_response_create_secret(stubber) + + # Add response to describe_secret for remediation script with LastAccessedDate < 90 + stubber.add_response( + "describe_secret", + { + "ARN": SECRET_ARN, + "LastAccessedDate": DATE_TODAY - timedelta(days=89), + }, + { + "SecretId": SECRET_ARN, + }, + ) + + # Activate stubber + stubber.activate() + + mocker.patch( + "RemoveUnusedSecret.connect_to_secretsmanager", + return_value=secretsmanager_client, + ) + + # Create test secret + secret = secretsmanager_client.create_secret( + Name="test-secret", SecretString="test-secret-value" + ) + + # Extract the ARN of the secret from response + secret_arn = secret["ARN"] + + event = { + "SecretARN": secret_arn, + "UnusedForDays": UNUSED_FOR_DAYS, + } + + # Execute the remediation script + with pytest.raises(SystemExit) as pytest_wrapped_e: + remediation(event, {}) + + # Assert the remediation script fails + assert pytest_wrapped_e.type == SystemExit + + assert "cannot be deleted because it has been accessed within the past" in str( + pytest_wrapped_e.value + ) + + # Deactivate stubber + stubber.deactivate() diff --git a/source/remediation_runbooks/scripts/test/test_RevokeUnauthorizedInboundRules.py b/source/remediation_runbooks/scripts/test/test_RevokeUnauthorizedInboundRules.py new file mode 100644 index 00000000..6d069a40 --- /dev/null +++ b/source/remediation_runbooks/scripts/test/test_RevokeUnauthorizedInboundRules.py @@ -0,0 +1,195 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0
+"""Test the functionality of the `RevokeUnauthorizedInboundRules` remediation script"""
+
+import boto3
+from botocore.config import Config
+from moto import mock_aws
+from RevokeUnauthorizedInboundRules import lambda_handler as remediation
+
+BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10})
+
+# IPV4 and IPV6 open access
+OPENIPV4 = "0.0.0.0/0"
+OPENIPV6 = "::/0"
+
+# Example list of authorized TCP ports
+AUTHORIZED_TCP_PORTS = ["80", "443"]
+
+# Example list of authorized UDP ports
+AUTHORIZED_UDP_PORTS = ["80", "443"]
+
+# Set of authorized ports
+AUTHORIZED_PORTS_SET = {80, 443}
+
+# Example ports for security group rules
+# 123 should be removed by remediation
+# 80 and 443 should remain
+PORTS_FOR_SG_RULE = {123, 80, 443}
+
+# Protocols to check
+PROTOCOLS = {"tcp", "udp"}
+
+
+def connect_to_ec2():
+    return boto3.client("ec2", config=BOTO_CONFIG)
+
+
+@mock_aws
+def test_revoke_unauthorized_inbound_rules_ipv4():
+    # Connect to EC2 and Config services
+    ec2 = connect_to_ec2()
+
+    # Create security group
+    sg = ec2.create_security_group(
+        GroupName="test_group", Description="Test security group"
+    )
+
+    # Get the security group ID
+    sg_id = sg["GroupId"]
+
+    # Add unrestricted TCP rules to security group
+    for port in PORTS_FOR_SG_RULE:
+        ec2.authorize_security_group_ingress(
+            GroupId=sg_id,
+            IpPermissions=[
+                {
+                    "FromPort": port,
+                    "IpProtocol": "tcp",
+                    "IpRanges": [
+                        {
+                            "CidrIp": OPENIPV4,
+                        },
+                    ],
+                    "ToPort": port,
+                }
+            ],
+        )
+
+    # Add unrestricted UDP rules to security group
+    for port in PORTS_FOR_SG_RULE:
+        ec2.authorize_security_group_ingress(
+            GroupId=sg_id,
+            IpPermissions=[
+                {
+                    "FromPort": port,
+                    "IpProtocol": "udp",
+                    "IpRanges": [
+                        {
+                            "CidrIp": OPENIPV4,
+                        },
+                    ],
+                    "ToPort": port,
+                }
+            ],
+        )
+
+    # Execute remediation
+    event = {
+        "SecurityGroupId": sg_id,
+        "AuthorizedTcpPorts": AUTHORIZED_TCP_PORTS,
+        "AuthorizedUdpPorts": AUTHORIZED_UDP_PORTS,
+    }
+    
remediation(event, {}) + + # Gather updated security group rules + security_group_rules = ec2.describe_security_group_rules( + Filters=[ + { + "Name": "group-id", + "Values": [ + sg_id, + ], + }, + ], + ) + + # Check that all open access rules are on authorized ports + for rule in security_group_rules["SecurityGroupRules"]: + # Check only TCP/UDP rules + if rule["IpProtocol"] in PROTOCOLS and "CidrIpv4" in rule: + # Check only IPv4 rules with open access + if rule["CidrIpv4"] == OPENIPV4: + # Check that ports are authorized + for port in range(rule["FromPort"], rule["ToPort"] + 1): + assert port in AUTHORIZED_PORTS_SET + + +@mock_aws +def test_revoke_unauthorized_inbound_rules_ipv6(): + # Connect to EC2 and Config services + ec2 = connect_to_ec2() + + # Create security group + sg = ec2.create_security_group( + GroupName="test_group", Description="Test security group" + ) + + # Get the security group ID + sg_id = sg["GroupId"] + + # Add unrestricted TCP rules to security group + for port in PORTS_FOR_SG_RULE: + ec2.authorize_security_group_ingress( + GroupId=sg_id, + IpPermissions=[ + { + "FromPort": port, + "IpProtocol": "tcp", + "Ipv6Ranges": [ + { + "CidrIpv6": OPENIPV6, + }, + ], + "ToPort": port, + } + ], + ) + + # Add unrestricted UDP rules to security group + for port in PORTS_FOR_SG_RULE: + ec2.authorize_security_group_ingress( + GroupId=sg_id, + IpPermissions=[ + { + "FromPort": port, + "IpProtocol": "udp", + "Ipv6Ranges": [ + { + "CidrIpv6": OPENIPV6, + }, + ], + "ToPort": port, + } + ], + ) + + # Execute remediation + event = { + "SecurityGroupId": sg_id, + "AuthorizedTcpPorts": AUTHORIZED_TCP_PORTS, + "AuthorizedUdpPorts": AUTHORIZED_UDP_PORTS, + } + remediation(event, {}) + + # Gather updated security group rules + security_group_rules = ec2.describe_security_group_rules( + Filters=[ + { + "Name": "group-id", + "Values": [ + sg_id, + ], + }, + ], + ) + + # Check that all open access rules are on authorized ports + for rule in 
security_group_rules["SecurityGroupRules"]: + # Check only TCP/UDP rules + if rule["IpProtocol"] in PROTOCOLS and "CidrIpv6" in rule: + # Check only IPv6 rules with open access + if rule["CidrIpv6"] == OPENIPV6: + # Check that ports are authorized + for port in range(rule["FromPort"], rule["ToPort"] + 1): + assert port in AUTHORIZED_PORTS_SET diff --git a/source/remediation_runbooks/scripts/test/test_SetCloudFrontOriginDomain.py b/source/remediation_runbooks/scripts/test/test_SetCloudFrontOriginDomain.py new file mode 100755 index 00000000..5b5ede55 --- /dev/null +++ b/source/remediation_runbooks/scripts/test/test_SetCloudFrontOriginDomain.py @@ -0,0 +1,63 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +import boto3 +from botocore.config import Config +from moto import mock_aws +from SetCloudFrontOriginDomain import lambda_handler as remediation + +BOTO_CONFIG = Config( + retries={"mode": "standard", "max_attempts": 10}, region_name="us-east-1" +) + + +@mock_aws +def test_update_root_distribution(): + cloudfront_client = boto3.client("cloudfront", config=BOTO_CONFIG) + response = cloudfront_client.create_distribution( + DistributionConfig={ + "CallerReference": "my-distribution-7-20-2", + "Aliases": {"Quantity": 1, "Items": ["test.com"]}, + "Origins": { + "Quantity": 1, + "Items": [ + { + "Id": "my-origin", + "DomainName": "nonexistentbucket.com", + "CustomOriginConfig": { + "HTTPPort": 80, + "HTTPSPort": 443, + "OriginProtocolPolicy": "https-only", + "OriginSslProtocols": {"Quantity": 1, "Items": ["TLSv1.2"]}, + }, + } + ], + }, + "DefaultCacheBehavior": { + "TargetOriginId": "my-origin", + "ViewerProtocolPolicy": "redirect-to-https", + "DefaultTTL": 86400, + "AllowedMethods": {"Quantity": 2, "Items": ["GET", "HEAD"]}, + "ForwardedValues": { + "QueryString": False, + "Cookies": {"Forward": "none"}, + "Headers": {"Quantity": 0}, + }, + "TrustedSigners": {"Enabled": False, "Quantity": 0}, + "MinTTL": 
0, + }, + "Comment": "My CloudFront distribution", + "Enabled": True, + } + ) + + distribution_id = response["Distribution"]["Id"] + + # Call remediation script + remediation(event={"Id": distribution_id}, _="") + + updated_response = cloudfront_client.get_distribution_config(Id=distribution_id) + updated_origin_domain = updated_response["DistributionConfig"]["Origins"]["Items"][ + 0 + ]["DomainName"] + assert updated_origin_domain == "cloudfront12remediation.example" diff --git a/source/remediation_runbooks/scripts/test/test_SetS3LifecyclePolicy.py b/source/remediation_runbooks/scripts/test/test_SetS3LifecyclePolicy.py new file mode 100644 index 00000000..1e2d44a8 --- /dev/null +++ b/source/remediation_runbooks/scripts/test/test_SetS3LifecyclePolicy.py @@ -0,0 +1,62 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test the functionality of the `SetS3LifecyclePolicy` remediation script""" + +import boto3 +from botocore.config import Config +from moto import mock_aws +from SetS3LifecyclePolicy import lambda_handler as remediation + +BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + + +@mock_aws +def test_set_lifecycle_policy(): + s3 = boto3.client("s3", config=BOTO_CONFIG) + + bucket_name = "test-bucket" + event = { + "BucketName": bucket_name, + "TargetTransitionDays": 90, + "TargetExpirationDays": 0, + "TargetTransitionStorageClass": "STANDARD_IA", + } + + # Create S3 bucket with no lifecycle policy + s3.create_bucket(Bucket=bucket_name) + + remediation(event, {}) + + lifecycle_config = s3.get_bucket_lifecycle_configuration( + Bucket=bucket_name, + ) + + # Assert the rule is the one we set with the remediation script + assert lifecycle_config["Rules"][0]["ID"] == "S3.13 Remediation Example" + assert "Expiration" not in lifecycle_config["Rules"][0] + + +@mock_aws +def test_set_lifecycle_policy_with_expiration(): + s3 = boto3.client("s3", config=BOTO_CONFIG) + + bucket_name = 
"test-bucket" + event = { + "BucketName": bucket_name, + "TargetTransitionDays": 90, + "TargetExpirationDays": 90, + "TargetTransitionStorageClass": "STANDARD_IA", + } + + # Create S3 bucket with no lifecycle policy + s3.create_bucket(Bucket=bucket_name) + + remediation(event, {}) + + lifecycle_config = s3.get_bucket_lifecycle_configuration( + Bucket=bucket_name, + ) + + # Assert the rule is the one we set with the remediation script + assert lifecycle_config["Rules"][0]["ID"] == "S3.13 Remediation Example" + assert "Expiration" in lifecycle_config["Rules"][0] diff --git a/source/remediation_runbooks/scripts/test/test_UpdateSecretRotationPeriod.py b/source/remediation_runbooks/scripts/test/test_UpdateSecretRotationPeriod.py new file mode 100644 index 00000000..b49c784e --- /dev/null +++ b/source/remediation_runbooks/scripts/test/test_UpdateSecretRotationPeriod.py @@ -0,0 +1,45 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +import boto3 +from botocore.config import Config +from moto import mock_aws +from UpdateSecretRotationPeriod import lambda_handler as remediation + +BOTO_CONFIG = Config( + retries={"mode": "standard", "max_attempts": 10}, region_name="us-east-1" +) + + +@mock_aws +def test_rotate_secret(): + secretsmanager = boto3.client("secretsmanager", config=BOTO_CONFIG) + + # Create test secret + secret = secretsmanager.create_secret( + Name="test-secret", SecretString="test-secret-value" + ) + + event = {"SecretARN": secret["ARN"], "MaxDaysSinceRotation": 90} + + # Set a rotation period longer than 90 days + secretsmanager.rotate_secret( + SecretId=secret["ARN"], + RotationRules={ + "AutomaticallyAfterDays": 100, + }, + RotateImmediately=False, + ) + + # Verify secret rotation is a value greater than 90 days + original_secret = secretsmanager.describe_secret(SecretId=secret["ARN"]) + + assert original_secret["RotationRules"]["AutomaticallyAfterDays"] > 90 + + # Execute remediation + 
remediation(event, {}) + + # Verify secret rotation is scheduled for at least every 90 days + secret_updated = secretsmanager.describe_secret(SecretId=secret["ARN"]) + + assert secret_updated["RotationRules"]["AutomaticallyAfterDays"] <= 90 diff --git a/source/remediation_runbooks/scripts/test/test_block_ssm_doc_public_access.py b/source/remediation_runbooks/scripts/test/test_block_ssm_doc_public_access.py new file mode 100644 index 00000000..593839c0 --- /dev/null +++ b/source/remediation_runbooks/scripts/test/test_block_ssm_doc_public_access.py @@ -0,0 +1,67 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test the functionality of the `disable_public_sharing_of_ssm_doc` remediation script""" + +from unittest.mock import patch + +import boto3 +from block_ssm_doc_public_access import lambda_handler +from botocore.config import Config +from botocore.stub import Stubber + + +def test_disable_public_sharing_of_ssm_document(mocker): + BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + ssm = boto3.client("ssm", config=BOTO_CONFIG) + stub_ssm = Stubber(ssm) + clients = {"ssm": ssm} + + document_arn = "arn:aws:ssm:us-east-1:111111111111:document/test" + document_name = "test" + + stub_ssm.add_response( + "describe_document_permission", + describedDocument, + {"Name": document_name, "PermissionType": "Share"}, + ) + + stub_ssm.add_response( + "modify_document_permission", + {}, + { + "Name": document_name, + "AccountIdsToRemove": ["all"], + "PermissionType": "Share", + }, + ) + + stub_ssm.add_response( + "describe_document_permission", + verifyDescribedDocument, + {"Name": document_name, "PermissionType": "Share"}, + ) + + stub_ssm.activate() + + with patch("boto3.client", side_effect=lambda service, **_: clients[service]): + event = {"document_arn": document_arn} + response = lambda_handler(event, {}) + assert response == {"isPublic": "False"} + + +describedDocument = { + 
"AccountIds": [ + "all", + ], + "AccountSharingInfoList": [ + {"AccountId": "all", "SharedDocumentVersion": "string"}, + ], + "NextToken": "string", +} + + +verifyDescribedDocument = { + "AccountIds": [], + "AccountSharingInfoList": [], + "NextToken": "string", +} diff --git a/source/remediation_runbooks/scripts/test/test_configure_stack_notifications.py b/source/remediation_runbooks/scripts/test/test_configure_stack_notifications.py index 2a2eb964..9fe278ac 100644 --- a/source/remediation_runbooks/scripts/test/test_configure_stack_notifications.py +++ b/source/remediation_runbooks/scripts/test/test_configure_stack_notifications.py @@ -1,145 +1,154 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 """Test the functionality of the `configure_stack_notifications` remediation script""" - -from typing import List from datetime import datetime +from typing import TYPE_CHECKING, List from unittest.mock import patch + import boto3 from botocore.stub import Stubber from configure_stack_notifications import lambda_handler -@patch('boto3.resource') +if TYPE_CHECKING: + from mypy_boto3_cloudformation.literals import StackStatusType + from mypy_boto3_cloudformation.type_defs import DescribeStacksOutputTypeDef +else: + StackStatusType = object + DescribeStacksOutputTypeDef = object + + +@patch("boto3.resource") def test_configures_stack(mock_resource): """Update stack called with notification topic ARN""" - cfn = boto3.client('cloudformation') + cfn = boto3.client("cloudformation") stub_cfn = Stubber(cfn) - sns = boto3.client('sns') + sns = boto3.client("sns") stub_sns = Stubber(sns) - clients = { 'cloudformation': cfn, 'sns': sns } + clients = {"cloudformation": cfn, "sns": sns} - stack_arn = 'blah' - topic_name = 'a_topic' - topic_arn = 'topic_arn' + stack_arn = "blah" + topic_name = "a_topic" + topic_arn = "topic_arn" mock_resource.return_value.Stack.return_value.parameters = None 
mock_resource.return_value.Stack.return_value.capabilities = [] mock_resource.return_value.Stack.return_value.notification_arns = [topic_arn] - stub_sns.add_response('create_topic', { 'TopicArn': topic_arn }, { 'Name': topic_name }) - stub_cfn.add_response('describe_stacks', - describe_stacks_response(stack_arn, 'UPDATE_COMPLETE', [topic_arn]), - { 'StackName': stack_arn }) + stub_sns.add_response("create_topic", {"TopicArn": topic_arn}, {"Name": topic_name}) + stub_cfn.add_response( + "describe_stacks", + describe_stacks_response(stack_arn, "UPDATE_COMPLETE", [topic_arn]), + {"StackName": stack_arn}, + ) stub_cfn.activate() stub_sns.activate() - with patch('boto3.client', side_effect=lambda service, **_ : clients[service]): - event = { 'stack_arn': stack_arn, 'topic_name': topic_name } + with patch("boto3.client", side_effect=lambda service, **_: clients[service]): + event = {"stack_arn": stack_arn, "topic_name": topic_name} response = lambda_handler(event, {}) - assert response == { 'NotificationARNs': [topic_arn] } + assert response == {"NotificationARNs": [topic_arn]} mock_resource.return_value.Stack.return_value.update.assert_called_once_with( - UsePreviousTemplate=True, - NotificationARNs=[topic_arn]) + UsePreviousTemplate=True, NotificationARNs=[topic_arn] + ) + -@patch('boto3.resource') +@patch("boto3.resource") def test_configures_with_parameters(mock_resource): """Update stack called stack parameters""" - cfn = boto3.client('cloudformation') + cfn = boto3.client("cloudformation") stub_cfn = Stubber(cfn) - sns = boto3.client('sns') + sns = boto3.client("sns") stub_sns = Stubber(sns) - clients = { 'cloudformation': cfn, 'sns': sns } + clients = {"cloudformation": cfn, "sns": sns} - stack_arn = 'blah' - topic_name = 'a_topic' - topic_arn = 'topic_arn' + stack_arn = "blah" + topic_name = "a_topic" + topic_arn = "topic_arn" mock_resource.return_value.Stack.return_value.parameters = [ - { - 'ParameterKey': 'a_key', - 'ParameterValue': 'a_value' - }, - { - 
'ParameterKey': 'another_key', - 'ResolvedValue': 'another_value' - } + {"ParameterKey": "a_key", "ParameterValue": "a_value"}, + {"ParameterKey": "another_key", "ResolvedValue": "another_value"}, ] mock_resource.return_value.Stack.return_value.capabilities = [] mock_resource.return_value.Stack.return_value.notification_arns = [topic_arn] - stub_sns.add_response('create_topic', { 'TopicArn': topic_arn }, { 'Name': topic_name }) - stub_cfn.add_response('describe_stacks', - describe_stacks_response(stack_arn, 'UPDATE_COMPLETE', [topic_arn]), - { 'StackName': stack_arn }) + stub_sns.add_response("create_topic", {"TopicArn": topic_arn}, {"Name": topic_name}) + stub_cfn.add_response( + "describe_stacks", + describe_stacks_response(stack_arn, "UPDATE_COMPLETE", [topic_arn]), + {"StackName": stack_arn}, + ) stub_cfn.activate() stub_sns.activate() - with patch('boto3.client', side_effect=lambda service, **_ : clients[service]): - event = { 'stack_arn': stack_arn, 'topic_name': topic_name } + with patch("boto3.client", side_effect=lambda service, **_: clients[service]): + event = {"stack_arn": stack_arn, "topic_name": topic_name} response = lambda_handler(event, {}) - assert response == { 'NotificationARNs': [topic_arn] } + assert response == {"NotificationARNs": [topic_arn]} mock_resource.return_value.Stack.return_value.update.assert_called_once_with( UsePreviousTemplate=True, Parameters=[ - { - 'ParameterKey': 'a_key', - 'UsePreviousValue': True - }, - { - 'ParameterKey': 'another_key', - 'UsePreviousValue': True - } + {"ParameterKey": "a_key", "UsePreviousValue": True}, + {"ParameterKey": "another_key", "UsePreviousValue": True}, ], - NotificationARNs=[topic_arn]) + NotificationARNs=[topic_arn], + ) -@patch('boto3.resource') + +@patch("boto3.resource") def test_configures_with_capabilities(mock_resource): """Update stack called with stack capabilities""" - cfn = boto3.client('cloudformation') + cfn = boto3.client("cloudformation") stub_cfn = Stubber(cfn) - sns = 
boto3.client('sns') + sns = boto3.client("sns") stub_sns = Stubber(sns) - clients = { 'cloudformation': cfn, 'sns': sns } + clients = {"cloudformation": cfn, "sns": sns} - stack_arn = 'blah' - topic_name = 'a_topic' - topic_arn = 'topic_arn' + stack_arn = "blah" + topic_name = "a_topic" + topic_arn = "topic_arn" mock_resource.return_value.Stack.return_value.parameters = None mock_resource.return_value.Stack.return_value.capabilities = [ - 'CAPABILITY_IAM', - 'CAPABILITY_NAMED_IAM', - 'CAPABILITY_AUTO_EXPAND' + "CAPABILITY_IAM", + "CAPABILITY_NAMED_IAM", + "CAPABILITY_AUTO_EXPAND", ] mock_resource.return_value.Stack.return_value.notification_arns = [topic_arn] - stub_sns.add_response('create_topic', { 'TopicArn': topic_arn }, { 'Name': topic_name }) - stub_cfn.add_response('describe_stacks', - describe_stacks_response(stack_arn, 'UPDATE_COMPLETE', [topic_arn]), - { 'StackName': stack_arn }) + stub_sns.add_response("create_topic", {"TopicArn": topic_arn}, {"Name": topic_name}) + stub_cfn.add_response( + "describe_stacks", + describe_stacks_response(stack_arn, "UPDATE_COMPLETE", [topic_arn]), + {"StackName": stack_arn}, + ) stub_cfn.activate() stub_sns.activate() - with patch('boto3.client', side_effect=lambda service, **_ : clients[service]): - event = { 'stack_arn': stack_arn, 'topic_name': topic_name } + with patch("boto3.client", side_effect=lambda service, **_: clients[service]): + event = {"stack_arn": stack_arn, "topic_name": topic_name} response = lambda_handler(event, {}) - assert response == { 'NotificationARNs': [topic_arn] } + assert response == {"NotificationARNs": [topic_arn]} mock_resource.return_value.Stack.return_value.update.assert_called_once_with( UsePreviousTemplate=True, Capabilities=[ - 'CAPABILITY_IAM', - 'CAPABILITY_NAMED_IAM', - 'CAPABILITY_AUTO_EXPAND' + "CAPABILITY_IAM", + "CAPABILITY_NAMED_IAM", + "CAPABILITY_AUTO_EXPAND", ], - NotificationARNs=[topic_arn]) + NotificationARNs=[topic_arn], + ) + -def describe_stacks_response(stack_arn: str, 
stack_status: str, notification_arns: List[str]): +def describe_stacks_response( + stack_arn: str, stack_status: StackStatusType, notification_arns: List[str] +) -> DescribeStacksOutputTypeDef: """ The response from a call to `describe_stacks` with the following properties substituted: @@ -172,47 +181,49 @@ def describe_stacks_response(stack_arn: str, stack_status: str, notification_arn `notification_arns`: the SNS topic ARNs configured for notifications from this stack """ - return { - 'Stacks': [ + # ignore incorrect type error for missing key NextToken + return { # type: ignore[typeddict-item] + "Stacks": [ { - 'StackId': stack_arn, - 'StackName': 'string', - 'ChangeSetId': 'string', - 'Description': 'string', - 'Parameters': [], - 'CreationTime': datetime(2015, 1, 1), - 'DeletionTime': datetime(2015, 1, 1), - 'LastUpdatedTime': datetime(2015, 1, 1), - 'RollbackConfiguration': { - 'RollbackTriggers': [ - { - 'Arn': 'string', - 'Type': 'string' - }, + "StackId": stack_arn, + "StackName": "string", + "ChangeSetId": "string", + "Description": "string", + "Parameters": [], + "CreationTime": datetime(2015, 1, 1), + "DeletionTime": datetime(2015, 1, 1), + "LastUpdatedTime": datetime(2015, 1, 1), + "RollbackConfiguration": { + "RollbackTriggers": [ + {"Arn": "string", "Type": "string"}, ], - 'MonitoringTimeInMinutes': 123 + "MonitoringTimeInMinutes": 123, }, - 'StackStatus': stack_status, - 'StackStatusReason': 'string', - 'DisableRollback': False, - 'NotificationARNs': notification_arns, - 'TimeoutInMinutes': 123, - 'Capabilities': [], - 'Outputs': [], - 'RoleARN': 'a_real_role_arn_no_really', - 'Tags': [ - { - 'Key': 'string', - 'Value': 'string' - }, + "StackStatus": stack_status, + "StackStatusReason": "string", + "DisableRollback": False, + "NotificationARNs": notification_arns, + "TimeoutInMinutes": 123, + "Capabilities": [], + "Outputs": [], + "RoleARN": "a_real_role_arn_no_really", + "Tags": [ + {"Key": "string", "Value": "string"}, ], - 
'EnableTerminationProtection': False, - 'ParentId': 'string', - 'RootId': 'string', - 'DriftInformation': { - 'StackDriftStatus': 'IN_SYNC', - 'LastCheckTimestamp': datetime(2015, 1, 1) - } + "EnableTerminationProtection": False, + "ParentId": "string", + "RootId": "string", + "DriftInformation": { + "StackDriftStatus": "IN_SYNC", + "LastCheckTimestamp": datetime(2015, 1, 1), + }, } - ] + ], + "ResponseMetadata": { + "RequestId": "", + "HostId": "", + "HTTPStatusCode": 200, + "HTTPHeaders": {}, + "RetryAttempts": 0, + }, } diff --git a/source/remediation_runbooks/scripts/test/test_createaccessloggingbucket.py b/source/remediation_runbooks/scripts/test/test_createaccessloggingbucket.py index ca049242..02b09f00 100644 --- a/source/remediation_runbooks/scripts/test/test_createaccessloggingbucket.py +++ b/source/remediation_runbooks/scripts/test/test_createaccessloggingbucket.py @@ -1,21 +1,22 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 +from typing import TYPE_CHECKING, Any, Dict + import boto3 import botocore.session +import CreateAccessLoggingBucket_createloggingbucket as script +from aws_lambda_powertools.utilities.typing import LambdaContext from botocore.config import Config from botocore.stub import Stubber -from moto import mock_s3 -from pytest_mock import mocker +from CreateAccessLoggingBucket_createloggingbucket import Event +from moto import mock_aws from pytest import raises -from typing import TYPE_CHECKING if TYPE_CHECKING: from mypy_boto3_s3.type_defs import GetBucketEncryptionOutputTypeDef else: GetBucketEncryptionOutputTypeDef = object -import CreateAccessLoggingBucket_createloggingbucket as script - def is_sse_s3_encrypted(config: GetBucketEncryptionOutputTypeDef) -> bool: rules = config["ServerSideEncryptionConfiguration"]["Rules"] @@ -30,16 +31,16 @@ def is_sse_s3_encrypted(config: GetBucketEncryptionOutputTypeDef) -> bool: return False -@mock_s3 def 
test_bucket_created_with_encryption() -> None: bucket_name = "my-bucket" - event = {"BucketName": bucket_name, "AWS_REGION": "us-east-1"} + event: Event = {"BucketName": bucket_name, "AWS_REGION": "us-east-1"} - script.create_logging_bucket(event, None) + with mock_aws(): + script.create_logging_bucket(event, LambdaContext()) - s3 = boto3.client("s3") - bucket_encryption = s3.get_bucket_encryption(Bucket=bucket_name) - assert is_sse_s3_encrypted(bucket_encryption) + s3 = boto3.client("s3") + bucket_encryption = s3.get_bucket_encryption(Bucket=bucket_name) + assert is_sse_s3_encrypted(bucket_encryption) def get_region() -> str: @@ -48,9 +49,7 @@ def get_region() -> str: def test_create_logging_bucket(mocker): - event = { - "SolutionId": "SO0000", - "SolutionVersion": "1.2.3", + event: Event = { "BucketName": "mahbukkit", "AWS_REGION": get_region(), } @@ -58,7 +57,7 @@ def test_create_logging_bucket(mocker): s3 = botocore.session.get_session().create_client("s3", config=BOTO_CONFIG) s3_stubber = Stubber(s3) - kwargs = { + kwargs: Dict[str, Any] = { "Bucket": event["BucketName"], "GrantWrite": "uri=http://acs.amazonaws.com/groups/s3/LogDelivery", "GrantReadACP": "uri=http://acs.amazonaws.com/groups/s3/LogDelivery", @@ -85,15 +84,13 @@ def test_create_logging_bucket(mocker): mocker.patch( "CreateAccessLoggingBucket_createloggingbucket.connect_to_s3", return_value=s3 ) - script.create_logging_bucket(event, {}) + script.create_logging_bucket(event, LambdaContext()) s3_stubber.assert_no_pending_responses() s3_stubber.deactivate() def test_bucket_already_exists(mocker): - event = { - "SolutionId": "SO0000", - "SolutionVersion": "1.2.3", + event: Event = { "BucketName": "mahbukkit", "AWS_REGION": get_region(), } @@ -109,15 +106,13 @@ def test_bucket_already_exists(mocker): "CreateAccessLoggingBucket_createloggingbucket.connect_to_s3", return_value=s3 ) with raises(SystemExit): - script.create_logging_bucket(event, {}) + script.create_logging_bucket(event, LambdaContext()) 
s3_stubber.assert_no_pending_responses() s3_stubber.deactivate() def test_bucket_already_owned_by_you(mocker): - event = { - "SolutionId": "SO0000", - "SolutionVersion": "1.2.3", + event: Event = { "BucketName": "mahbukkit", "AWS_REGION": get_region(), } @@ -132,6 +127,6 @@ def test_bucket_already_owned_by_you(mocker): mocker.patch( "CreateAccessLoggingBucket_createloggingbucket.connect_to_s3", return_value=s3 ) - script.create_logging_bucket(event, {}) + script.create_logging_bucket(event, LambdaContext()) s3_stubber.assert_no_pending_responses() s3_stubber.deactivate() diff --git a/source/remediation_runbooks/scripts/test/test_createcloudtrailmultiregiontrail.py b/source/remediation_runbooks/scripts/test/test_createcloudtrailmultiregiontrail.py index 19ed3f2b..fe39ba08 100644 --- a/source/remediation_runbooks/scripts/test/test_createcloudtrailmultiregiontrail.py +++ b/source/remediation_runbooks/scripts/test/test_createcloudtrailmultiregiontrail.py @@ -1,18 +1,25 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import boto3 import json -import botocore.session -from botocore.stub import Stubber -from botocore.config import Config -import pytest -from pytest_mock import mocker +from typing import TYPE_CHECKING, Any, Dict +import boto3 +import botocore.session import CreateCloudTrailMultiRegionTrail_createcloudtrailbucket as createcloudtrailbucket import CreateCloudTrailMultiRegionTrail_createcloudtrailbucketpolicy as createcloudtrailbucketpolicy import CreateCloudTrailMultiRegionTrail_createloggingbucket as createloggingbucket import CreateCloudTrailMultiRegionTrail_enablecloudtrail as enablecloudtrail import CreateCloudTrailMultiRegionTrail_process_results as process_results +import pytest +from aws_lambda_powertools.utilities.typing import LambdaContext +from botocore.config import Config +from botocore.stub import Stubber +from CreateCloudTrailMultiRegionTrail_createloggingbucket import Event + +if TYPE_CHECKING: + from mypy_boto3_s3.client import S3Client +else: + S3Client = object def get_region() -> str: @@ -36,7 +43,10 @@ def test_create_encrypted_bucket(mocker): s3 = botocore.session.get_session().create_client("s3", config=BOTO_CONFIG) s3_stubber = Stubber(s3) - kwargs = {"Bucket": "so0111-aws-cloudtrail-111111111111", "ACL": "private"} + kwargs: Dict[str, Any] = { + "Bucket": "so0111-aws-cloudtrail-111111111111", + "ACL": "private", + } if get_region() != "us-east-1": kwargs["CreateBucketConfiguration"] = {"LocationConstraint": get_region()} @@ -110,7 +120,10 @@ def test_bucket_already_exists(mocker): s3 = botocore.session.get_session().create_client("s3", config=BOTO_CONFIG) s3_stubber = Stubber(s3) - kwargs = {"Bucket": "so0111-aws-cloudtrail-111111111111", "ACL": "private"} + kwargs: Dict[str, Any] = { + "Bucket": "so0111-aws-cloudtrail-111111111111", + "ACL": "private", + } if get_region() != "us-east-1": kwargs["CreateBucketConfiguration"] = {"LocationConstraint": get_region()} @@ -139,7 +152,10 @@ def 
test_bucket_already_owned_by_you(mocker): s3 = botocore.session.get_session().create_client("s3", config=BOTO_CONFIG) s3_stubber = Stubber(s3) - kwargs = {"Bucket": "so0111-aws-cloudtrail-111111111111", "ACL": "private"} + kwargs: Dict[str, Any] = { + "Bucket": "so0111-aws-cloudtrail-111111111111", + "ACL": "private", + } if get_region() != "us-east-1": kwargs["CreateBucketConfiguration"] = {"LocationConstraint": get_region()} @@ -202,7 +218,10 @@ def test_create_bucket_policy(mocker): s3 = botocore.session.get_session().create_client("s3", config=BOTO_CONFIG) s3_stubber = Stubber(s3) - kwargs = {"Bucket": "so0111-aws-cloudtrail-111111111111", "ACL": "private"} + kwargs: Dict[str, Any] = { + "Bucket": "so0111-aws-cloudtrail-111111111111", + "ACL": "private", + } if get_region() != "us-east-1": kwargs["CreateBucketConfiguration"] = {"LocationConstraint": get_region()} @@ -226,39 +245,15 @@ def test_create_bucket_policy(mocker): # CreateCloudTrailMultiRegionTrail_createloggingbucket # ===================================================================================== def test_create_logging_bucket(mocker): - event = { - "SolutionId": "SO0000", - "SolutionVersion": "1.2.3", + event: Event = { "region": get_region(), "kms_key_arn": "arn:aws:kms:us-east-1:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab", "account": "111111111111", } - bucket_policy = { - "Version": "2012-10-17", - "Statement": [ - { - "Sid": "AWSCloudTrailAclCheck20150319", - "Effect": "Allow", - "Principal": {"Service": ["cloudtrail.amazonaws.com"]}, - "Action": "s3:GetBucketAcl", - "Resource": "arn:aws:s3:::mahbukkit", - }, - { - "Sid": "AWSCloudTrailWrite20150319", - "Effect": "Allow", - "Principal": {"Service": ["cloudtrail.amazonaws.com"]}, - "Action": "s3:PutObject", - "Resource": "arn:aws:s3:::mahbukkit/AWSLogs/111111111111/*", - "Condition": { - "StringEquals": {"s3:x-amz-acl": "bucket-owner-full-control"} - }, - }, - ], - } BOTO_CONFIG = Config(retries={"mode": "standard"}, 
region_name=get_region()) s3 = botocore.session.get_session().create_client("s3", config=BOTO_CONFIG) - kwargs = { + kwargs: Dict[str, Any] = { "Bucket": "so0111-access-logs-" + get_region() + "-111111111111", "ACL": "private", "ObjectOwnership": "ObjectWriter", @@ -316,7 +311,7 @@ def test_create_logging_bucket(mocker): "CreateCloudTrailMultiRegionTrail_createloggingbucket.connect_to_s3", return_value=s3, ) - createloggingbucket.create_logging_bucket(event, {}) + createloggingbucket.create_logging_bucket(event, LambdaContext()) s3_stubber.assert_no_pending_responses() s3_stubber.deactivate() @@ -387,13 +382,13 @@ def test_put_bucket_acl_fails(): Verify proper exit when put_bucket_acl fails """ - s3 = botocore.session.get_session().create_client("s3") + s3: S3Client = boto3.client("s3") s3_stubber = Stubber(s3) s3_stubber.add_client_error("put_bucket_acl", "ADoorIsAjar") s3_stubber.activate() with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = createloggingbucket.put_bucket_acl(s3, "mahbukkit") + createloggingbucket.put_bucket_acl(s3, "mahbukkit") assert pytest_wrapped_e.type == SystemExit assert ( pytest_wrapped_e.value.code @@ -408,13 +403,13 @@ def test_put_access_blocks_fails(): Verify proper exit when put_public_access_blocks fails """ - s3 = botocore.session.get_session().create_client("s3") + s3: S3Client = boto3.client("s3") s3_stubber = Stubber(s3) s3_stubber.add_client_error("put_public_access_block", "ADoorIsAjar") s3_stubber.activate() with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = createloggingbucket.put_access_block(s3, "mahbukkit") + createloggingbucket.put_access_block(s3, "mahbukkit") assert pytest_wrapped_e.type == SystemExit assert ( pytest_wrapped_e.value.code @@ -429,13 +424,13 @@ def test_encrypt_bucket_fails(): Verify proper exit when put_bucket_encryption fails """ - s3 = botocore.session.get_session().create_client("s3") + s3: S3Client = boto3.client("s3") s3_stubber = Stubber(s3) 
s3_stubber.add_client_error("put_bucket_encryption", "ADoorIsAjar") s3_stubber.activate() with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = createloggingbucket.encrypt_bucket( + createloggingbucket.encrypt_bucket( s3, "mahbukkit", "arn:aws:kms:us-east-1:111111111111:key/mahcryptionkey" ) assert pytest_wrapped_e.type == SystemExit diff --git a/source/remediation_runbooks/scripts/test/test_createlogmetricfilterandalarm.py b/source/remediation_runbooks/scripts/test/test_createlogmetricfilterandalarm.py index 25bc73e5..68bb5fe1 100644 --- a/source/remediation_runbooks/scripts/test/test_createlogmetricfilterandalarm.py +++ b/source/remediation_runbooks/scripts/test/test_createlogmetricfilterandalarm.py @@ -1,271 +1,277 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 +from typing import Dict + import boto3 import botocore.session -from botocore.stub import Stubber -from botocore.config import Config -import pytest - import CreateLogMetricFilterAndAlarm as logMetricAlarm import CreateLogMetricFilterAndAlarm_createtopic as topicutil -import unittest +import pytest +from botocore.config import Config +from botocore.stub import Stubber my_session = boto3.session.Session() my_region = my_session.region_name -def test_verify(mocker): +def test_verify(mocker): event = { - 'FilterName': 'test_filter', - 'FilterPattern': 'test_pattern', - 'MetricName': 'test_metric', - 'MetricNamespace': 'test_metricnamespace', - 'MetricValue': 'test_metric_value', - 'AlarmName': 'test_alarm', - 'AlarmDesc': 'alarm_desc', - 'AlarmThreshold': 'alarm_threshold', - 'LogGroupName': 'test_log', - 'TopicArn': 'arn:aws:sns:us-east-1:111111111111:test-topic-name' + "FilterName": "test_filter", + "FilterPattern": "test_pattern", + "MetricName": "test_metric", + "MetricNamespace": "test_metricnamespace", + "MetricValue": "test_metric_value", + "AlarmName": "test_alarm", + "AlarmDesc": "alarm_desc", + "AlarmThreshold": 
"alarm_threshold", + "LogGroupName": "test_log", + "TopicArn": "arn:aws:sns:us-east-1:111111111111:test-topic-name", } - context = {} - mocker.patch('CreateLogMetricFilterAndAlarm.put_metric_filter') - mocker.patch('CreateLogMetricFilterAndAlarm.put_metric_alarm') - metric_filter_spy = mocker.spy(logMetricAlarm, 'put_metric_filter') - metric_alarm_spy = mocker.spy(logMetricAlarm, 'put_metric_alarm') + context: Dict[str, str] = {} + mocker.patch("CreateLogMetricFilterAndAlarm.put_metric_filter") + mocker.patch("CreateLogMetricFilterAndAlarm.put_metric_alarm") + metric_filter_spy = mocker.spy(logMetricAlarm, "put_metric_filter") + metric_alarm_spy = mocker.spy(logMetricAlarm, "put_metric_alarm") logMetricAlarm.verify(event, context) - metric_filter_spy.assert_called_once_with('test_log', 'test_filter', 'test_pattern', 'test_metric', 'test_metricnamespace', 'test_metric_value') - metric_alarm_spy.assert_called_once_with('test_alarm', 'alarm_desc', 'alarm_threshold', 'test_metric', 'test_metricnamespace', 'arn:aws:sns:us-east-1:111111111111:test-topic-name') + metric_filter_spy.assert_called_once_with( + "test_log", + "test_filter", + "test_pattern", + "test_metric", + "test_metricnamespace", + "test_metric_value", + ) + metric_alarm_spy.assert_called_once_with( + "test_alarm", + "alarm_desc", + "alarm_threshold", + "test_metric", + "test_metricnamespace", + "arn:aws:sns:us-east-1:111111111111:test-topic-name", + ) def test_put_metric_filter_pass(mocker): event = { - 'FilterName': 'test_filter', - 'FilterPattern': 'test_pattern', - 'MetricName': 'test_metric', - 'MetricNamespace': 'test_metricnamespace', - 'MetricValue': 'test_metric_value', - 'AlarmName': 'test_alarm', - 'AlarmDesc': 'alarm_desc', - 'AlarmThreshold': 'alarm_threshold', - 'LogGroupName': 'test_log', - 'TopicArn': 'arn:aws:sns:us-east-1:111111111111:test-topic-name' + "FilterName": "test_filter", + "FilterPattern": "test_pattern", + "MetricName": "test_metric", + "MetricNamespace": 
"test_metricnamespace", + "MetricValue": "test_metric_value", + "AlarmName": "test_alarm", + "AlarmDesc": "alarm_desc", + "AlarmThreshold": "alarm_threshold", + "LogGroupName": "test_log", + "TopicArn": "arn:aws:sns:us-east-1:111111111111:test-topic-name", } - BOTO_CONFIG = Config( - retries={ - 'mode': 'standard' - }, - region_name=my_region - ) - logs = botocore.session.get_session().create_client('logs', config=BOTO_CONFIG) + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) + logs = botocore.session.get_session().create_client("logs", config=BOTO_CONFIG) logs_stubber = Stubber(logs) logs_stubber.add_response( - 'put_metric_filter', + "put_metric_filter", {}, { - 'logGroupName': event['LogGroupName'], - 'filterName': event['FilterName'], - 'filterPattern': event['FilterPattern'], - 'metricTransformations': [ + "logGroupName": event["LogGroupName"], + "filterName": event["FilterName"], + "filterPattern": event["FilterPattern"], + "metricTransformations": [ { - 'metricName': event['MetricName'], - 'metricNamespace':event['MetricNamespace'], - 'metricValue': str (event['MetricValue']), - 'unit': 'Count' + "metricName": event["MetricName"], + "metricNamespace": event["MetricNamespace"], + "metricValue": str(event["MetricValue"]), + "unit": "Count", } - ] - } + ], + }, ) logs_stubber.activate() - mocker.patch('CreateLogMetricFilterAndAlarm.get_service_client', return_value = logs ) + mocker.patch("CreateLogMetricFilterAndAlarm.get_service_client", return_value=logs) logMetricAlarm.put_metric_filter( - event['LogGroupName'], event['FilterName'], event['FilterPattern'], - event['MetricName'], event['MetricNamespace'], event['MetricValue'] + event["LogGroupName"], + event["FilterName"], + event["FilterPattern"], + event["MetricName"], + event["MetricNamespace"], + event["MetricValue"], ) - assert logs_stubber.assert_no_pending_responses() is None + logs_stubber.assert_no_pending_responses() logs_stubber.deactivate() def 
test_put_metric_filter_error(mocker): event = { - 'FilterName': 'test_filter', - 'FilterPattern': 'test_pattern', - 'MetricName': 'test_metric', - 'MetricNamespace': 'test_metricnamespace', - 'MetricValue': 'test_metric_value', - 'AlarmName': 'test_alarm', - 'AlarmDesc': 'alarm_desc', - 'AlarmThreshold': 'alarm_threshold', - 'LogGroupName': 'test_log', - 'TopicArn': 'arn:aws:sns:us-east-1:111111111111:test-topic-name' + "FilterName": "test_filter", + "FilterPattern": "test_pattern", + "MetricName": "test_metric", + "MetricNamespace": "test_metricnamespace", + "MetricValue": "test_metric_value", + "AlarmName": "test_alarm", + "AlarmDesc": "alarm_desc", + "AlarmThreshold": "alarm_threshold", + "LogGroupName": "test_log", + "TopicArn": "arn:aws:sns:us-east-1:111111111111:test-topic-name", } - BOTO_CONFIG = Config( - retries={ - 'mode': 'standard' - }, - region_name=my_region - ) - logs = botocore.session.get_session().create_client('logs', config=BOTO_CONFIG) + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) + logs = botocore.session.get_session().create_client("logs", config=BOTO_CONFIG) logs_stubber = Stubber(logs) - logs_stubber.add_client_error( - 'put_metric_filter', - 'CannotAddFilter' - ) + logs_stubber.add_client_error("put_metric_filter", "CannotAddFilter") logs_stubber.activate() - mocker.patch('CreateLogMetricFilterAndAlarm.get_service_client', return_value=logs) + mocker.patch("CreateLogMetricFilterAndAlarm.get_service_client", return_value=logs) with pytest.raises(SystemExit) as pytest_wrapped_exception: logMetricAlarm.put_metric_filter( - event['LogGroupName'], event['FilterName'], event['FilterPattern'], - event['MetricName'], event['MetricNamespace'], event['MetricValue'] + event["LogGroupName"], + event["FilterName"], + event["FilterPattern"], + event["MetricName"], + event["MetricNamespace"], + event["MetricValue"], ) assert pytest_wrapped_exception.type == SystemExit def test_put_metric_alarm(mocker): event = { - 
'FilterName': 'test_filter', - 'FilterPattern': 'test_pattern', - 'MetricName': 'test_metric', - 'MetricNamespace': 'test_metricnamespace', - 'MetricValue': 'test_metric_value', - 'AlarmName': 'test_alarm', - 'AlarmDesc': 'alarm_desc', - 'AlarmThreshold': 1, - 'LogGroupName': 'test_log', - 'TopicArn': 'arn:aws:sns:us-east-1:111111111111:test-topic-name' + "FilterName": "test_filter", + "FilterPattern": "test_pattern", + "MetricName": "test_metric", + "MetricNamespace": "test_metricnamespace", + "MetricValue": "test_metric_value", + "AlarmName": "test_alarm", + "AlarmDesc": "alarm_desc", + "AlarmThreshold": 1, + "LogGroupName": "test_log", + "TopicArn": "arn:aws:sns:us-east-1:111111111111:test-topic-name", } - BOTO_CONFIG = Config( - retries={ - 'mode': 'standard' - }, - region_name=my_region + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) + cloudwatch = botocore.session.get_session().create_client( + "cloudwatch", config=BOTO_CONFIG ) - cloudwatch = botocore.session.get_session().create_client('cloudwatch', config=BOTO_CONFIG) cloudwatch_stubber = Stubber(cloudwatch) cloudwatch_stubber.add_response( - 'put_metric_alarm', + "put_metric_alarm", {}, { - 'AlarmName': event['AlarmName'], - 'AlarmDescription': event['AlarmDesc'], - 'ActionsEnabled': True, - 'OKActions': [ - 'arn:aws:sns:us-east-1:111111111111:test-topic-name' - ], - 'AlarmActions': [ - 'arn:aws:sns:us-east-1:111111111111:test-topic-name' - ], - 'MetricName': event['MetricName'], - 'Namespace': event['MetricNamespace'], - 'Statistic': 'Sum', - 'Period': 300, - 'Unit': 'Count', - 'EvaluationPeriods': 12, - 'DatapointsToAlarm': 1, - 'Threshold': (event['AlarmThreshold']), - 'ComparisonOperator': 'GreaterThanOrEqualToThreshold', - 'TreatMissingData': 'notBreaching' - } + "AlarmName": event["AlarmName"], + "AlarmDescription": event["AlarmDesc"], + "ActionsEnabled": True, + "OKActions": ["arn:aws:sns:us-east-1:111111111111:test-topic-name"], + "AlarmActions": 
["arn:aws:sns:us-east-1:111111111111:test-topic-name"], + "MetricName": event["MetricName"], + "Namespace": event["MetricNamespace"], + "Statistic": "Sum", + "Period": 300, + "Unit": "Count", + "EvaluationPeriods": 12, + "DatapointsToAlarm": 1, + "Threshold": (event["AlarmThreshold"]), + "ComparisonOperator": "GreaterThanOrEqualToThreshold", + "TreatMissingData": "notBreaching", + }, ) cloudwatch_stubber.activate() - mocker.patch('CreateLogMetricFilterAndAlarm.get_service_client', return_value=cloudwatch) + mocker.patch( + "CreateLogMetricFilterAndAlarm.get_service_client", return_value=cloudwatch + ) logMetricAlarm.put_metric_alarm( - event['AlarmName'], event['AlarmDesc'], event['AlarmThreshold'], - event['MetricName'], event['MetricNamespace'], event['TopicArn'] + event["AlarmName"], + event["AlarmDesc"], + event["AlarmThreshold"], + event["MetricName"], + event["MetricNamespace"], + event["TopicArn"], ) - assert cloudwatch_stubber.assert_no_pending_responses() is None + cloudwatch_stubber.assert_no_pending_responses() cloudwatch_stubber.deactivate() def test_put_metric_alarm_error(mocker): event = { - 'FilterName': 'test_filter', - 'FilterPattern': 'test_pattern', - 'MetricName': 'test_metric', - 'MetricNamespace': 'test_metricnamespace', - 'MetricValue': 'test_metric_value', - 'AlarmName': 'test_alarm', - 'AlarmDesc': 'alarm_desc', - 'AlarmThreshold': 1, - 'LogGroupName': 'test_log', - 'TopicArn': 'arn:aws:sns:us-east-1:111111111111:test-topic-name' + "FilterName": "test_filter", + "FilterPattern": "test_pattern", + "MetricName": "test_metric", + "MetricNamespace": "test_metricnamespace", + "MetricValue": "test_metric_value", + "AlarmName": "test_alarm", + "AlarmDesc": "alarm_desc", + "AlarmThreshold": 1, + "LogGroupName": "test_log", + "TopicArn": "arn:aws:sns:us-east-1:111111111111:test-topic-name", } - BOTO_CONFIG = Config( - retries={ - 'mode': 'standard' - }, - region_name=my_region + BOTO_CONFIG = Config(retries={"mode": "standard"}, 
region_name=my_region) + cloudwatch = botocore.session.get_session().create_client( + "cloudwatch", config=BOTO_CONFIG ) - cloudwatch = botocore.session.get_session().create_client('cloudwatch', config=BOTO_CONFIG) cloudwatch_stubber = Stubber(cloudwatch) - cloudwatch_stubber.add_client_error( - 'put_metric_alarm', - 'CannotAddAlarm' - ) + cloudwatch_stubber.add_client_error("put_metric_alarm", "CannotAddAlarm") cloudwatch_stubber.activate() - mocker.patch('CreateLogMetricFilterAndAlarm.get_service_client', return_value=cloudwatch) + mocker.patch( + "CreateLogMetricFilterAndAlarm.get_service_client", return_value=cloudwatch + ) with pytest.raises(SystemExit) as pytest_wrapped_exception: logMetricAlarm.put_metric_alarm( - event['AlarmName'], event['AlarmDesc'], event['AlarmThreshold'], - event['MetricName'], event['MetricNamespace'], event['TopicArn'] + event["AlarmName"], + event["AlarmDesc"], + event["AlarmThreshold"], + event["MetricName"], + event["MetricNamespace"], + event["TopicArn"], ) assert pytest_wrapped_exception.type == SystemExit cloudwatch_stubber.deactivate() + def topic_event(): return { - 'topic_name': 'sharr-test-topic', - 'kms_key_arn': 'arn:aws:kms:ap-northeast-1:111122223333:key/foobarbaz' + "topic_name": "sharr-test-topic", + "kms_key_arn": "arn:aws:kms:ap-northeast-1:111122223333:key/foobarbaz", } + def test_create_new_topic(mocker): - BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region - ) - ssm_client = botocore.session.get_session().create_client('ssm', config=BOTO_CONFIG) + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) + ssm_client = botocore.session.get_session().create_client("ssm", config=BOTO_CONFIG) ssm_stubber = Stubber(ssm_client) ssm_stubber.add_response( - 'put_parameter', + "put_parameter", {}, { - 'Name': '/Solutions/SO0111/SNS_Topic_CIS3.x', - 'Description': 'SNS Topic for AWS Config updates', - 'Type': 'String', - 'Overwrite': True, - 'Value': 
'arn:aws:sns:us-east-1:111111111111:sharr-test-topic' - } + "Name": "/Solutions/SO0111/SNS_Topic_CIS3.x", + "Description": "SNS Topic for AWS Config updates", + "Type": "String", + "Overwrite": True, + "Value": "arn:aws:sns:us-east-1:111111111111:sharr-test-topic", + }, ) ssm_stubber.activate() - sns_client = botocore.session.get_session().create_client('sns', config=BOTO_CONFIG) + sns_client = botocore.session.get_session().create_client("sns", config=BOTO_CONFIG) sns_stubber = Stubber(sns_client) sns_stubber.add_response( - 'create_topic', - { - 'TopicArn': 'arn:aws:sns:us-east-1:111111111111:sharr-test-topic' - } - ) - sns_stubber.add_response( - 'set_topic_attributes', - {} + "create_topic", + {"TopicArn": "arn:aws:sns:us-east-1:111111111111:sharr-test-topic"}, ) + sns_stubber.add_response("set_topic_attributes", {}) sns_stubber.activate() - mocker.patch('CreateLogMetricFilterAndAlarm_createtopic.connect_to_ssm', return_value=ssm_client) - mocker.patch('CreateLogMetricFilterAndAlarm_createtopic.connect_to_sns', return_value=sns_client) + mocker.patch( + "CreateLogMetricFilterAndAlarm_createtopic.connect_to_ssm", + return_value=ssm_client, + ) + mocker.patch( + "CreateLogMetricFilterAndAlarm_createtopic.connect_to_sns", + return_value=sns_client, + ) assert topicutil.create_encrypted_topic(topic_event(), {}) == { - 'topic_arn': 'arn:aws:sns:us-east-1:111111111111:sharr-test-topic' + "topic_arn": "arn:aws:sns:us-east-1:111111111111:sharr-test-topic" } diff --git a/source/remediation_runbooks/scripts/test/test_disable_publicip_auto_assign.py b/source/remediation_runbooks/scripts/test/test_disable_publicip_auto_assign.py index 5f76b032..b2a25f3e 100644 --- a/source/remediation_runbooks/scripts/test/test_disable_publicip_auto_assign.py +++ b/source/remediation_runbooks/scripts/test/test_disable_publicip_auto_assign.py @@ -1,96 +1,83 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 """Test the functionality of the `disable_publicip_auto_assign` remediation script""" - + from unittest.mock import patch + import boto3 -from botocore.stub import Stubber from botocore.config import Config +from botocore.stub import Stubber from disable_publicip_auto_assign import lambda_handler - + + def test_disable_publicip_auto_assign(mocker): - - BOTO_CONFIG = Config( - retries = { - 'mode': 'standard', - 'max_attempts': 10 - } - ) - ec2 = boto3.client('ec2', config=BOTO_CONFIG) + BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + ec2 = boto3.client("ec2", config=BOTO_CONFIG) stub_ec2 = Stubber(ec2) - clients = { 'ec2': ec2 } - - subnet_arn = 'arn:aws:ec2:us-east-1:111111111111:subnet/subnet-017e22c0195eb5ded' - + clients = {"ec2": ec2} + + subnet_arn = "arn:aws:ec2:us-east-1:111111111111:subnet/subnet-017e22c0195eb5ded" + subnet_id = "subnet-017e22c0195eb5ded" stub_ec2.add_response( - 'modify_subnet_attribute', - {}, - { - "MapPublicIpOnLaunch":{'Value':False}, - 'SubnetId':subnet_id - } - ) + "modify_subnet_attribute", + {}, + {"MapPublicIpOnLaunch": {"Value": False}, "SubnetId": subnet_id}, + ) stub_ec2.add_response( - 'describe_subnets', - describedSubnet, - { 'SubnetIds': [ subnet_id ] } + "describe_subnets", describedSubnet, {"SubnetIds": [subnet_id]} ) - + stub_ec2.activate() - - with patch('boto3.client', side_effect=lambda service, **_ : clients[service]): - event = { 'subnet_arn': subnet_arn } - response = lambda_handler(event, {}) - assert response == { - "MapPublicIpOnLaunch": False -} + + with patch("boto3.client", side_effect=lambda service, **_: clients[service]): + event = {"subnet_arn": subnet_arn} + response = lambda_handler(event, {}) + assert response == {"MapPublicIpOnLaunch": False} + describedSubnet = { - 'Subnets': [ + "Subnets": [ { - 'AvailabilityZone': 'string', - 'AvailabilityZoneId': 'string', - 'AvailableIpAddressCount': 123, - 'CidrBlock': 'string', - 'DefaultForAz': 
False, - 'EnableLniAtDeviceIndex': 123, - 'MapPublicIpOnLaunch': False, - 'MapCustomerOwnedIpOnLaunch': False, - 'CustomerOwnedIpv4Pool': 'string', - 'State': 'available', - 'SubnetId': 'string', - 'VpcId': 'string', - 'OwnerId': 'string', - 'AssignIpv6AddressOnCreation': False, - 'Ipv6CidrBlockAssociationSet': [ + "AvailabilityZone": "string", + "AvailabilityZoneId": "string", + "AvailableIpAddressCount": 123, + "CidrBlock": "string", + "DefaultForAz": False, + "EnableLniAtDeviceIndex": 123, + "MapPublicIpOnLaunch": False, + "MapCustomerOwnedIpOnLaunch": False, + "CustomerOwnedIpv4Pool": "string", + "State": "available", + "SubnetId": "string", + "VpcId": "string", + "OwnerId": "string", + "AssignIpv6AddressOnCreation": False, + "Ipv6CidrBlockAssociationSet": [ { - 'AssociationId': 'string', - 'Ipv6CidrBlock': 'string', - 'Ipv6CidrBlockState': { - 'State': 'associating', - 'StatusMessage': 'string' - } + "AssociationId": "string", + "Ipv6CidrBlock": "string", + "Ipv6CidrBlockState": { + "State": "associating", + "StatusMessage": "string", + }, }, ], - 'Tags': [ - { - 'Key': 'string', - 'Value': 'string' - }, + "Tags": [ + {"Key": "string", "Value": "string"}, ], - 'SubnetArn': 'string', - 'OutpostArn': 'string', - 'EnableDns64': False, - 'Ipv6Native': False, - 'PrivateDnsNameOptionsOnLaunch': { - 'HostnameType': 'ip-name', - 'EnableResourceNameDnsARecord': False, - 'EnableResourceNameDnsAAAARecord': False - } + "SubnetArn": "string", + "OutpostArn": "string", + "EnableDns64": False, + "Ipv6Native": False, + "PrivateDnsNameOptionsOnLaunch": { + "HostnameType": "ip-name", + "EnableResourceNameDnsARecord": False, + "EnableResourceNameDnsAAAARecord": False, + }, }, ], - 'NextToken': 'string' -} \ No newline at end of file + "NextToken": "string", +} diff --git a/source/remediation_runbooks/scripts/test/test_disableunrestrictedaccesstohighriskports.py b/source/remediation_runbooks/scripts/test/test_disableunrestrictedaccesstohighriskports.py new file mode 100644 index 
00000000..d1045c5d --- /dev/null +++ b/source/remediation_runbooks/scripts/test/test_disableunrestrictedaccesstohighriskports.py @@ -0,0 +1,165 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test the functionality of the `DisableUnrestrictedAccessToHighRiskPorts` remediation script""" + +import boto3 +from botocore.config import Config +from DisableUnrestrictedAccessToHighRiskPorts import PORTS_TO_CHECK +from DisableUnrestrictedAccessToHighRiskPorts import lambda_handler as remediation +from moto import mock_aws + +BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + +OPENIPV4 = "0.0.0.0/0" +OPENIPV6 = "::/0" +PROTOCOLS = {"tcp", "udp"} + + +@mock_aws +def test_disable_unrestricted_access_ipv4(): + ec2 = boto3.client("ec2", config=BOTO_CONFIG) + + # Create a new security group + sg = ec2.create_security_group(GroupName="test", Description="test") + + # Get the security group ID + sg_id = sg["GroupId"] + event = {"SecurityGroupId": sg_id} + + # Add a TCP rule for each port that is allowed from anywhere + for port in PORTS_TO_CHECK: + ec2.authorize_security_group_ingress( + GroupId=sg_id, + IpPermissions=[ + { + "FromPort": port, + "IpProtocol": "tcp", + "IpRanges": [ + { + "CidrIp": OPENIPV4, + }, + ], + "ToPort": port, + } + ], + ) + + # Add a UDP rule for each port that is allowed from anywhere + for port in PORTS_TO_CHECK: + ec2.authorize_security_group_ingress( + GroupId=sg_id, + IpPermissions=[ + { + "FromPort": port, + "IpProtocol": "udp", + "IpRanges": [ + { + "CidrIp": OPENIPV4, + }, + ], + "ToPort": port, + } + ], + ) + + remediation(event, {}) + + # Get the security group rules + security_group_rules = ec2.describe_security_group_rules( + Filters=[ + { + "Name": "group-id", + "Values": [ + sg_id, + ], + }, + ], + ) + + for rule in security_group_rules["SecurityGroupRules"]: + # Check only TCP/UDP rules + if rule["IpProtocol"] in PROTOCOLS and "CidrIpv4" in rule: + # 
Assert the rule does not have open IPV4 access to a high risk port + assert not ( + any( + port in PORTS_TO_CHECK + for port in range(rule["FromPort"], rule["ToPort"] + 1) + ) + and not rule["IsEgress"] + and rule["CidrIpv4"] == OPENIPV4 + ) + + +@mock_aws +def test_disable_unrestricted_access_ipv6(): + ec2 = boto3.client("ec2", config=BOTO_CONFIG) + + # Create a new security group + sg = ec2.create_security_group(GroupName="test", Description="test") + + # Get the security group ID + sg_id = sg["GroupId"] + event = {"SecurityGroupId": sg_id} + + # Add a TCP rule for each port that is allowed from anywhere + for port in PORTS_TO_CHECK: + ec2.authorize_security_group_ingress( + GroupId=sg_id, + IpPermissions=[ + { + "FromPort": port, + "IpProtocol": "tcp", + "Ipv6Ranges": [ + { + "CidrIpv6": OPENIPV6, + }, + ], + "ToPort": port, + } + ], + ) + + # Add a UDP rule for each port that is allowed from anywhere + for port in PORTS_TO_CHECK: + ec2.authorize_security_group_ingress( + GroupId=sg_id, + IpPermissions=[ + { + "FromPort": port, + "IpProtocol": "udp", + "Ipv6Ranges": [ + { + "CidrIpv6": OPENIPV6, + }, + ], + "ToPort": port, + } + ], + ) + + remediation(event, {}) + + # Get the security group rules + security_group_rules = ec2.describe_security_group_rules( + Filters=[ + { + "Name": "group-id", + "Values": [ + sg_id, + ], + }, + ], + ) + + for rule in security_group_rules["SecurityGroupRules"]: + # Check only TCP/UDP rules + if rule["IpProtocol"] in PROTOCOLS and "CidrIpv6" in rule: + # Assert the rule does not have open IPV6 access to a high risk port + assert not ( + any( + port in PORTS_TO_CHECK + for port in range(rule["FromPort"], rule["ToPort"] + 1) + ) + and not rule["IsEgress"] + and rule["CidrIpv6"] == OPENIPV6 + ) diff --git a/source/remediation_runbooks/scripts/test/test_enable_bucket_event_notifications.py b/source/remediation_runbooks/scripts/test/test_enable_bucket_event_notifications.py new file mode 100644 index 00000000..1bc09103 --- /dev/null +++ 
b/source/remediation_runbooks/scripts/test/test_enable_bucket_event_notifications.py @@ -0,0 +1,428 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test the functionality of the `enable_bucket_event_notifications` remediation script""" + +import json +from unittest.mock import patch + +import boto3 +import pytest +from botocore.config import Config +from botocore.exceptions import UnknownRegionError +from botocore.stub import Stubber +from enable_bucket_event_notifications import lambda_handler + + +def partition_from_region(session: boto3.session.Session): + """ + returns the partition for a given region + On success returns a string + On failure returns aws + """ + try: + partition = session.get_partition_for_region(session.region_name) + except UnknownRegionError: + return "aws" + + return partition + + +bucket_name = "test-bucket" +topic_name = "testTopic" +account_id = "111111111" +event_types = [ + "s3:ReducedRedundancyLostObject", + "s3:ObjectCreated:*", + "s3:ObjectRemoved:*", + "s3:ObjectRestore:*", + "s3:Replication:*", + "s3:LifecycleExpiration:*", + "s3:LifecycleTransition", + "s3:IntelligentTiering", + "s3:ObjectTagging:*", + "s3:ObjectAcl:Put", +] +region = "us-east-1" +session = boto3.session.Session(region_name="us-east-1") +partition = partition_from_region(session) +topic_arn = f"arn:{partition}:sns:{region}:{account_id}:{topic_name}" +BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + +policy = { + "Version": "2012-10-17", + "Id": "ASR Notification Policy", + "Statement": [ + { + "Sid": bucket_name + " ASR Notification Policy", + "Effect": "Allow", + "Principal": {"Service": "s3.amazonaws.com"}, + "Action": ["SNS:Publish"], + "Resource": topic_arn, + "Condition": { + "ArnLike": {"aws:SourceArn": ["arn:aws:s3:::" + bucket_name]}, + "StringEquals": {"aws:SourceAccount": [account_id]}, + }, + } + ], +} + + +def test_enable_bucket_event_notifications(mocker): + 
sns = boto3.client("sns", config=BOTO_CONFIG) + stub_sns = Stubber(sns) + s3 = boto3.client("s3", config=BOTO_CONFIG) + stub_s3 = Stubber(s3) + clients = {"sns": sns, "s3": s3} + stub_sns.add_response( + "get_topic_attributes", + {}, + { + "TopicArn": topic_arn, + }, + ) + + stub_sns.add_response( + "create_topic", + {"TopicArn": topic_arn}, + {"Name": topic_name, "Attributes": {"Policy": json.dumps(policy)}}, + ) + + stub_s3.add_response( + "put_bucket_notification_configuration", + {}, + { + "Bucket": bucket_name, + "NotificationConfiguration": { + "TopicConfigurations": [ + { + "Id": "ASR Bucket Notification Topic Config", + "Events": [ + "s3:ReducedRedundancyLostObject", + "s3:ObjectCreated:*", + "s3:ObjectRemoved:*", + "s3:ObjectRestore:*", + "s3:Replication:*", + "s3:LifecycleExpiration:*", + "s3:LifecycleTransition", + "s3:IntelligentTiering", + "s3:ObjectTagging:*", + "s3:ObjectAcl:Put", + ], + "TopicArn": topic_arn, + } + ] + }, + }, + ) + + stub_s3.add_response( + "get_bucket_notification_configuration", + { + "TopicConfigurations": [ + { + "Id": "ASR Bucket Notification Topic Config", + "TopicArn": topic_arn, + "Events": [ + "s3:ReducedRedundancyLostObject", + "s3:ObjectCreated:*", + "s3:ObjectRemoved:*", + "s3:ObjectRestore:*", + "s3:Replication:*", + "s3:ObjectTagging:*", + "s3:ObjectAcl:Put", + "s3:LifecycleExpiration:*", + "s3:LifecycleTransition", + "s3:IntelligentTiering", + ], + } + ] + }, + {"Bucket": bucket_name, "ExpectedBucketOwner": account_id}, + ) + + stub_sns.activate() + + stub_s3.activate() + + with patch("boto3.client", side_effect=lambda service, **_: clients[service]): + event = { + "bucket_name": bucket_name, + "topic_name": topic_name, + "account_id": account_id, + "event_types": event_types, + } + response = lambda_handler(event, {}) + assert response == {"NotificationARNs": topic_arn} + + +def test_enable_bucket_event_notifications_topic_exists(mocker): + sns = boto3.client("sns", config=BOTO_CONFIG) + stub_sns = Stubber(sns) + s3 
= boto3.client("s3", config=BOTO_CONFIG) + stub_s3 = Stubber(s3) + clients = {"sns": sns, "s3": s3} + received_attributes = { + "Attributes": { + "Policy": '{"Version":"2012-10-17","Id":"ASR Notification Policy","Statement":[{"Sid":"test-bucket ASR Notification Policy","Effect":"Allow","Principal":{"Service":"s3.amazonaws.com"},"Action":"SNS:Publish","Resource":"arn:aws:sns:us-east-1:111111111111:SO0111-ASR-S3BucketNotifications","Condition":{"StringEquals":{"aws:SourceAccount":"111111111111"},"ArnLike":{"aws:SourceArn":"arn:aws:s3:::test1"}}}]}', + "Owner": "111111111111", + "SubscriptionsPending": "0", + "TopicArn": "arn:aws:sns:us-east-1:111111111111:testTopic", + "EffectiveDeliveryPolicy": '{"http":{"defaultHealthyRetryPolicy":{"minDelayTarget":20,"maxDelayTarget":20,"numRetries":3,"numMaxDelayRetries":0,"numNoDelayRetries":0,"numMinDelayRetries":0,"backoffFunction":"linear"},"disableSubscriptionOverrides":false,"defaultRequestPolicy":{"headerContentType":"text/plain; charset=UTF-8"}}}', + "SubscriptionsConfirmed": "0", + "DisplayName": "", + "SubscriptionsDeleted": "0", + } + } + stub_sns.add_response( + "get_topic_attributes", + received_attributes, + { + "TopicArn": topic_arn, + }, + ) + + stub_s3.add_response( + "put_bucket_notification_configuration", + {}, + { + "Bucket": bucket_name, + "NotificationConfiguration": { + "TopicConfigurations": [ + { + "Id": "ASR Bucket Notification Topic Config", + "Events": [ + "s3:ReducedRedundancyLostObject", + "s3:ObjectCreated:*", + "s3:ObjectRemoved:*", + "s3:ObjectRestore:*", + "s3:Replication:*", + "s3:LifecycleExpiration:*", + "s3:LifecycleTransition", + "s3:IntelligentTiering", + "s3:ObjectTagging:*", + "s3:ObjectAcl:Put", + ], + "TopicArn": topic_arn, + } + ] + }, + }, + ) + + stub_s3.add_response( + "get_bucket_notification_configuration", + { + "TopicConfigurations": [ + { + "Id": "ASR Bucket Notification Topic Config", + "TopicArn": topic_arn, + "Events": [ + "s3:ReducedRedundancyLostObject", + 
"s3:ObjectCreated:*", + "s3:ObjectRemoved:*", + "s3:ObjectRestore:*", + "s3:Replication:*", + "s3:ObjectTagging:*", + "s3:ObjectAcl:Put", + "s3:LifecycleExpiration:*", + "s3:LifecycleTransition", + "s3:IntelligentTiering", + ], + } + ] + }, + {"Bucket": bucket_name, "ExpectedBucketOwner": account_id}, + ) + + stub_sns.activate() + + stub_s3.activate() + + with patch("boto3.client", side_effect=lambda service, **_: clients[service]): + event = { + "bucket_name": bucket_name, + "topic_name": topic_name, + "account_id": account_id, + "event_types": event_types, + } + response = lambda_handler(event, {}) + assert response == {"NotificationARNs": topic_arn} + + +def test_enable_bucket_event_notifications_topic_exists_sid_exists(mocker): + sns = boto3.client("sns", config=BOTO_CONFIG) + stub_sns = Stubber(sns) + s3 = boto3.client("s3", config=BOTO_CONFIG) + stub_s3 = Stubber(s3) + clients = {"sns": sns, "s3": s3} + received_attributes = { + "Attributes": { + "Policy": '{"Version":"2012-10-17","Id":"ASR Notification Policy","Statement":[{"Sid":"ASR Notification Policy","Effect":"Allow","Principal":{"Service":"s3.amazonaws.com"},"Action":"SNS:Publish","Resource":"arn:aws:sns:us-east-1:111111111111:SO0111-ASR-S3BucketNotifications","Condition":{"StringEquals":{"aws:SourceAccount":"111111111111"},"ArnLike":{"aws:SourceArn":"arn:aws:s3:::test1"}}}]}', + "Owner": "111111111111", + "SubscriptionsPending": "0", + "TopicArn": "arn:aws:sns:us-east-1:111111111111:testTopic", + "EffectiveDeliveryPolicy": '{"http":{"defaultHealthyRetryPolicy":{"minDelayTarget":20,"maxDelayTarget":20,"numRetries":3,"numMaxDelayRetries":0,"numNoDelayRetries":0,"numMinDelayRetries":0,"backoffFunction":"linear"},"disableSubscriptionOverrides":false,"defaultRequestPolicy":{"headerContentType":"text/plain; charset=UTF-8"}}}', + "SubscriptionsConfirmed": "0", + "DisplayName": "", + "SubscriptionsDeleted": "0", + } + } + stub_sns.add_response( + "get_topic_attributes", + received_attributes, + { + 
"TopicArn": topic_arn, + }, + ) + topic_attributes_policy = received_attributes["Attributes"]["Policy"] # str + topic_attributes_policy_dict = json.loads(topic_attributes_policy) # dict + topic_attributes_policy_dict["Statement"].append(policy["Statement"][0]) + new_topic_attributes_policy = json.dumps(topic_attributes_policy_dict) + stub_sns.add_response( + "set_topic_attributes", + {}, + { + "TopicArn": topic_arn, + "AttributeName": "Policy", + "AttributeValue": new_topic_attributes_policy, + }, + ) + + stub_s3.add_response( + "put_bucket_notification_configuration", + {}, + { + "Bucket": bucket_name, + "NotificationConfiguration": { + "TopicConfigurations": [ + { + "Id": "ASR Bucket Notification Topic Config", + "Events": [ + "s3:ReducedRedundancyLostObject", + "s3:ObjectCreated:*", + "s3:ObjectRemoved:*", + "s3:ObjectRestore:*", + "s3:Replication:*", + "s3:LifecycleExpiration:*", + "s3:LifecycleTransition", + "s3:IntelligentTiering", + "s3:ObjectTagging:*", + "s3:ObjectAcl:Put", + ], + "TopicArn": topic_arn, + } + ] + }, + }, + ) + + stub_s3.add_response( + "get_bucket_notification_configuration", + { + "TopicConfigurations": [ + { + "Id": "ASR Bucket Notification Topic Config", + "TopicArn": topic_arn, + "Events": [ + "s3:ReducedRedundancyLostObject", + "s3:ObjectCreated:*", + "s3:ObjectRemoved:*", + "s3:ObjectRestore:*", + "s3:Replication:*", + "s3:ObjectTagging:*", + "s3:ObjectAcl:Put", + "s3:LifecycleExpiration:*", + "s3:LifecycleTransition", + "s3:IntelligentTiering", + ], + } + ] + }, + {"Bucket": bucket_name, "ExpectedBucketOwner": account_id}, + ) + + stub_sns.activate() + + stub_s3.activate() + + with patch("boto3.client", side_effect=lambda service, **_: clients[service]): + event = { + "bucket_name": bucket_name, + "topic_name": topic_name, + "account_id": account_id, + "event_types": event_types, + } + response = lambda_handler(event, {}) + assert response == {"NotificationARNs": topic_arn} + + +def test_error_case(mocker): + sns = 
boto3.client("sns", config=BOTO_CONFIG) + stub_sns = Stubber(sns) + s3 = boto3.client("s3", config=BOTO_CONFIG) + stub_s3 = Stubber(s3) + clients = {"sns": sns, "s3": s3} + stub_sns.add_response( + "get_topic_attributes", + {}, + { + "TopicArn": topic_arn, + }, + ) + + stub_sns.add_response( + "create_topic", + {"TopicArn": topic_arn}, + {"Name": topic_name, "Attributes": {"Policy": json.dumps(policy)}}, + ) + + stub_s3.add_response( + "put_bucket_notification_configuration", + {}, + { + "Bucket": bucket_name, + "NotificationConfiguration": { + "TopicConfigurations": [ + { + "Id": "ASR Bucket Notification Topic Config", + "Events": [ + "s3:ReducedRedundancyLostObject", + "s3:ObjectCreated:*", + "s3:ObjectRemoved:*", + "s3:ObjectRestore:*", + "s3:Replication:*", + "s3:LifecycleExpiration:*", + "s3:LifecycleTransition", + "s3:IntelligentTiering", + "s3:ObjectTagging:*", + "s3:ObjectAcl:Put", + ], + "TopicArn": topic_arn, + } + ] + }, + }, + ) + + stub_s3.add_response( + "get_bucket_notification_configuration", + {}, + {"Bucket": bucket_name, "ExpectedBucketOwner": account_id}, + ) + + stub_sns.activate() + + stub_s3.activate() + + with patch("boto3.client", side_effect=lambda service, **_: clients[service]): + event = { + "bucket_name": bucket_name, + "topic_name": topic_name, + "account_id": account_id, + "event_types": event_types, + } + with pytest.raises( + RuntimeError, + match=f"ERROR: {bucket_name} was not configured with notifications", + ): + lambda_handler(event, {}) diff --git a/source/remediation_runbooks/scripts/test/test_enable_cloudfront_default_root_object.py b/source/remediation_runbooks/scripts/test/test_enable_cloudfront_default_root_object.py new file mode 100644 index 00000000..4f9c84a2 --- /dev/null +++ b/source/remediation_runbooks/scripts/test/test_enable_cloudfront_default_root_object.py @@ -0,0 +1,81 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 + +import boto3 +from botocore.config import Config +from enable_cloudfront_default_root_object import handler as remediation +from moto import mock_aws + +BOTO_CONFIG = Config( + retries={"mode": "standard", "max_attempts": 10}, region_name="us-east-1" +) + + +@mock_aws +def test_update_root_distribution(): + cloudfront_client = boto3.client("cloudfront", config=BOTO_CONFIG) + response = cloudfront_client.create_distribution( + DistributionConfig={ + "CallerReference": "my-distribution-7-20-2", + "Aliases": {"Quantity": 1, "Items": ["test.com"]}, + "Origins": { + "Quantity": 1, + "Items": [ + { + "Id": "my-origin", + "DomainName": "example.com", + "CustomOriginConfig": { + "HTTPPort": 80, + "HTTPSPort": 443, + "OriginProtocolPolicy": "https-only", + "OriginSslProtocols": {"Quantity": 1, "Items": ["TLSv1.2"]}, + }, + } + ], + }, + "DefaultCacheBehavior": { + "TargetOriginId": "my-origin", + "ViewerProtocolPolicy": "redirect-to-https", + "DefaultTTL": 86400, + "AllowedMethods": {"Quantity": 2, "Items": ["GET", "HEAD"]}, + "ForwardedValues": { + "QueryString": False, + "Cookies": {"Forward": "none"}, + "Headers": {"Quantity": 0}, + }, + "TrustedSigners": {"Enabled": False, "Quantity": 0}, + "MinTTL": 0, + }, + "Comment": "My CloudFront distribution", + "Enabled": True, + } + ) + + print(response) + + distribution_arn = response["Distribution"]["ARN"] + + distribution_id = response["Distribution"]["Id"] + + print("Fake Moto Cloudfront Distribution ID: " + distribution_id) + + print( + "now calling remediation script with " + + distribution_id + + " as the target to update" + ) + # call remediation script + event = { + "cloudfront_distribution": distribution_arn, + "root_object": "index.html", + } + remediation( + event, + {}, + ) + + updated_response = cloudfront_client.get_distribution(Id=distribution_id) + updated_root_object = updated_response["Distribution"]["DistributionConfig"][ + "DefaultRootObject" + ] + assert 
updated_root_object == "index.html" diff --git a/source/remediation_runbooks/scripts/test/test_enable_delivery_status_logging.py b/source/remediation_runbooks/scripts/test/test_enable_delivery_status_logging.py index afe9cf52..fe5fbbba 100644 --- a/source/remediation_runbooks/scripts/test/test_enable_delivery_status_logging.py +++ b/source/remediation_runbooks/scripts/test/test_enable_delivery_status_logging.py @@ -3,93 +3,105 @@ """Test the functionality of the `enable_delivery_status_logging` remediation script""" from unittest.mock import patch + import boto3 -from botocore.stub import Stubber from botocore.config import Config +from botocore.stub import Stubber from enable_delivery_status_logging import lambda_handler + def test_enables_delivery_status_logging(mocker): - endpointTypes = ['HTTP', 'Firehose', 'Lambda', 'Application', 'SQS'] + endpointTypes = ["HTTP", "Firehose", "Lambda", "Application", "SQS"] my_session = boto3.session.Session() - my_region = my_session.region_name + my_region = my_session.region_name - BOTO_CONFIG = Config( - retries = { - 'mode': 'standard', - 'max_attempts': 10 - } - ) - sns = boto3.client('sns', config=BOTO_CONFIG) + BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + sns = boto3.client("sns", config=BOTO_CONFIG) stub_sns = Stubber(sns) - clients = { 'sns': sns } + clients = {"sns": sns} - logging_arn = 'logging_arn' - topic_arn = f'arn:aws:sns:{my_region}:111111111111:sharr-test' + logging_arn = "logging_arn" + topic_arn = f"arn:aws:sns:{my_region}:111111111111:sharr-test" - stub_response = { 'Attributes' : { - "LambdaFailureFeedbackRoleArn": logging_arn, - "LambdaSuccessFeedbackRoleArn": logging_arn, - "LambdaSuccessFeedbackSampleRate": '0', - "HTTPFailureFeedbackRoleArn": logging_arn, - "HTTPSuccessFeedbackRoleArn": logging_arn, - "HTTPSuccessFeedbackSampleRate": '0', - "FirehoseFailureFeedbackRoleArn": logging_arn, - "FirehoseSuccessFeedbackRoleArn": logging_arn, - 
"FirehoseSuccessFeedbackSampleRate": '0', - "ApplicationFailureFeedbackRoleArn": logging_arn, - "ApplicationSuccessFeedbackRoleArn": logging_arn, - "ApplicationSuccessFeedbackSampleRate": '0', - "SQSFailureFeedbackRoleArn": logging_arn, - "SQSSuccessFeedbackRoleArn": logging_arn, - "SQSSuccessFeedbackSampleRate": '0' - }} + stub_response = { + "Attributes": { + "LambdaFailureFeedbackRoleArn": logging_arn, + "LambdaSuccessFeedbackRoleArn": logging_arn, + "LambdaSuccessFeedbackSampleRate": "0", + "HTTPFailureFeedbackRoleArn": logging_arn, + "HTTPSuccessFeedbackRoleArn": logging_arn, + "HTTPSuccessFeedbackSampleRate": "0", + "FirehoseFailureFeedbackRoleArn": logging_arn, + "FirehoseSuccessFeedbackRoleArn": logging_arn, + "FirehoseSuccessFeedbackSampleRate": "0", + "ApplicationFailureFeedbackRoleArn": logging_arn, + "ApplicationSuccessFeedbackRoleArn": logging_arn, + "ApplicationSuccessFeedbackSampleRate": "0", + "SQSFailureFeedbackRoleArn": logging_arn, + "SQSSuccessFeedbackRoleArn": logging_arn, + "SQSSuccessFeedbackSampleRate": "0", + } + } for endpoint in endpointTypes: - stub_sns.add_response( - 'set_topic_attributes', - {}, - { - 'TopicArn': topic_arn , - 'AttributeName': f"{endpoint}SuccessFeedbackRoleArn", - 'AttributeValue': logging_arn}) + stub_sns.add_response( + "set_topic_attributes", + {}, + { + "TopicArn": topic_arn, + "AttributeName": f"{endpoint}SuccessFeedbackRoleArn", + "AttributeValue": logging_arn, + }, + ) - stub_sns.add_response('set_topic_attributes', - {}, - { 'TopicArn': topic_arn , - 'AttributeName': f"{endpoint}FailureFeedbackRoleArn" , - 'AttributeValue': logging_arn}) - - for endpoint in endpointTypes: - stub_sns.add_response('set_topic_attributes', - {}, - { 'TopicArn': topic_arn , - 'AttributeName': f"{endpoint}SuccessFeedbackSampleRate" , - 'AttributeValue': '0'}) + stub_sns.add_response( + "set_topic_attributes", + {}, + { + "TopicArn": topic_arn, + "AttributeName": f"{endpoint}FailureFeedbackRoleArn", + "AttributeValue": logging_arn, 
+ }, + ) - stub_sns.add_response('get_topic_attributes', - stub_response, - { 'TopicArn': topic_arn }) + for endpoint in endpointTypes: + stub_sns.add_response( + "set_topic_attributes", + {}, + { + "TopicArn": topic_arn, + "AttributeName": f"{endpoint}SuccessFeedbackSampleRate", + "AttributeValue": "0", + }, + ) + + stub_sns.add_response( + "get_topic_attributes", stub_response, {"TopicArn": topic_arn} + ) stub_sns.activate() - with patch('boto3.client', side_effect=lambda service, **_ : clients[service]): - event = { 'topic_arn': topic_arn, 'logging_role': logging_arn, 'sample_rate': '0' } + with patch("boto3.client", side_effect=lambda service, **_: clients[service]): + event = { + "topic_arn": topic_arn, + "logging_role": logging_arn, + "sample_rate": "0", + } response = lambda_handler(event, {}) - assert response == { + assert response == { "LambdaFailureFeedbackRoleArn": logging_arn, "LambdaSuccessFeedbackRoleArn": logging_arn, - "LambdaSuccessFeedbackSampleRate": '0', + "LambdaSuccessFeedbackSampleRate": "0", "HTTPFailureFeedbackRoleArn": logging_arn, "HTTPSuccessFeedbackRoleArn": logging_arn, - "HTTPSuccessFeedbackSampleRate": '0', + "HTTPSuccessFeedbackSampleRate": "0", "FirehoseFailureFeedbackRoleArn": logging_arn, "FirehoseSuccessFeedbackRoleArn": logging_arn, - "FirehoseSuccessFeedbackSampleRate": '0', + "FirehoseSuccessFeedbackSampleRate": "0", "ApplicationFailureFeedbackRoleArn": logging_arn, "ApplicationSuccessFeedbackRoleArn": logging_arn, - "ApplicationSuccessFeedbackSampleRate": '0', + "ApplicationSuccessFeedbackSampleRate": "0", "SQSFailureFeedbackRoleArn": logging_arn, "SQSSuccessFeedbackRoleArn": logging_arn, - "SQSSuccessFeedbackSampleRate": '0' } - - + "SQSSuccessFeedbackSampleRate": "0", + } diff --git a/source/remediation_runbooks/scripts/test/test_enable_imds_V2_on_instance.py b/source/remediation_runbooks/scripts/test/test_enable_imds_V2_on_instance.py new file mode 100644 index 00000000..2e462a61 --- /dev/null +++ 
b/source/remediation_runbooks/scripts/test/test_enable_imds_V2_on_instance.py @@ -0,0 +1,257 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test the functionality of the `enable_imds_v2_on_instance` remediation script""" + +from datetime import datetime +from typing import TYPE_CHECKING +from unittest.mock import patch + +import boto3 +from botocore.config import Config +from botocore.stub import Stubber +from enable_imds_v2_on_instance import lambda_handler + +if TYPE_CHECKING: + from mypy_boto3_ec2.client import EC2Client + from mypy_boto3_ec2.type_defs import DescribeInstancesResultTypeDef +else: + DescribeInstancesResultTypeDef = object + + +def test_enable_imds_v2_on_instance(mocker): + BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + ec2: EC2Client = boto3.client("ec2", config=BOTO_CONFIG) + stub_ec2 = Stubber(ec2) + clients = {"ec2": ec2} + + instance_arn = ( + "arn:aws:ec2:us-east-1:111111111111:instance/instance-017e22c0195eb5ded" + ) + + instance_id = "instance-017e22c0195eb5ded" + + stub_ec2.add_response( + "modify_instance_metadata_options", + {}, + { + "InstanceId": instance_id, + "HttpTokens": "required", + "HttpEndpoint": "enabled", + }, + ) + + stub_ec2.add_response( + "describe_instances", describedInstance, {"InstanceIds": [instance_id]} + ) + + stub_ec2.activate() + + metadata_options = describedInstance["Reservations"][0]["Instances"][0][ + "MetadataOptions" + ] + + with patch("boto3.client", side_effect=lambda service, **_: clients[service]): + event = {"instance_arn": instance_arn} + response = lambda_handler(event, {}) + assert response == metadata_options + + +describedInstance: DescribeInstancesResultTypeDef = { + "Reservations": [ + { + "Groups": [ + {"GroupName": "string", "GroupId": "string"}, + ], + "Instances": [ + { + "AmiLaunchIndex": 123, + "ImageId": "string", + "InstanceId": "string", + "InstanceType": "m7i-flex.8xlarge", + "KernelId": 
"string", + "KeyName": "string", + "LaunchTime": datetime(2015, 1, 1), + "Monitoring": {"State": "pending"}, + "Placement": { + "AvailabilityZone": "string", + "Affinity": "string", + "GroupName": "string", + "PartitionNumber": 123, + "HostId": "string", + "Tenancy": "host", + "SpreadDomain": "string", + "HostResourceGroupArn": "string", + "GroupId": "string", + }, + "Platform": "Windows", + "PrivateDnsName": "string", + "PrivateIpAddress": "string", + "ProductCodes": [ + {"ProductCodeId": "string", "ProductCodeType": "marketplace"}, + ], + "PublicDnsName": "string", + "PublicIpAddress": "string", + "RamdiskId": "string", + "State": {"Code": 123, "Name": "stopped"}, + "StateTransitionReason": "string", + "SubnetId": "string", + "VpcId": "string", + "Architecture": "arm64_mac", + "BlockDeviceMappings": [ + { + "DeviceName": "string", + "Ebs": { + "AttachTime": datetime(2015, 1, 1), + "DeleteOnTermination": False, + "Status": "detached", + "VolumeId": "string", + }, + }, + ], + "ClientToken": "string", + "EbsOptimized": False, + "EnaSupport": False, + "Hypervisor": "xen", + "IamInstanceProfile": {"Arn": "string", "Id": "string"}, + "InstanceLifecycle": "scheduled", + "ElasticGpuAssociations": [ + { + "ElasticGpuId": "string", + "ElasticGpuAssociationId": "string", + "ElasticGpuAssociationState": "string", + "ElasticGpuAssociationTime": "2023-08-21T23:02:50+00:00", + }, + ], + "ElasticInferenceAcceleratorAssociations": [ + { + "ElasticInferenceAcceleratorArn": "string", + "ElasticInferenceAcceleratorAssociationId": "string", + "ElasticInferenceAcceleratorAssociationState": "string", + "ElasticInferenceAcceleratorAssociationTime": datetime( + 2015, 1, 1 + ), + }, + ], + "NetworkInterfaces": [ + { + "Association": { + "CarrierIp": "string", + "CustomerOwnedIp": "string", + "IpOwnerId": "string", + "PublicDnsName": "string", + "PublicIp": "string", + }, + "Attachment": { + "AttachTime": datetime(2015, 1, 1), + "AttachmentId": "string", + "DeleteOnTermination": False, + 
"DeviceIndex": 123, + "Status": "detached", + "NetworkCardIndex": 123, + }, + "Description": "string", + "Groups": [ + {"GroupName": "string", "GroupId": "string"}, + ], + "Ipv6Addresses": [], + "MacAddress": "string", + "NetworkInterfaceId": "string", + "OwnerId": "string", + "PrivateDnsName": "string", + "PrivateIpAddress": "string", + "PrivateIpAddresses": [ + { + "Association": { + "CarrierIp": "string", + "CustomerOwnedIp": "string", + "IpOwnerId": "string", + "PublicDnsName": "string", + "PublicIp": "string", + }, + "Primary": False, + "PrivateDnsName": "string", + "PrivateIpAddress": "string", + }, + ], + "SourceDestCheck": False, + "Status": "detaching", + "SubnetId": "string", + "VpcId": "string", + "InterfaceType": "string", + "Ipv4Prefixes": [ + {"Ipv4Prefix": "string"}, + ], + "Ipv6Prefixes": [ + {"Ipv6Prefix": "string"}, + ], + }, + ], + "OutpostArn": "string", + "RootDeviceName": "string", + "RootDeviceType": "instance-store", + "SecurityGroups": [ + {"GroupName": "string", "GroupId": "string"}, + ], + "SourceDestCheck": False, + "SpotInstanceRequestId": "string", + "SriovNetSupport": "string", + "StateReason": { + "Code": "string", + "Message": "string", + # type: ignore[typeddict-item] + }, + "Tags": [ + {"Key": "string", "Value": "string"}, + ], + "VirtualizationType": "paravirtual", + "CpuOptions": {"CoreCount": 123, "ThreadsPerCore": 123}, + "CapacityReservationId": "string", + "CapacityReservationSpecification": { + "CapacityReservationPreference": "none", + "CapacityReservationTarget": { + "CapacityReservationId": "string", + "CapacityReservationResourceGroupArn": "string", + }, + }, + "HibernationOptions": {"Configured": False}, + "Licenses": [ + {"LicenseConfigurationArn": "string"}, + ], + "MetadataOptions": { + "State": "applied", + "HttpTokens": "required", + "HttpPutResponseHopLimit": 123, + "HttpEndpoint": "enabled", + "HttpProtocolIpv6": "enabled", + "InstanceMetadataTags": "enabled", + }, + "EnclaveOptions": {"Enabled": False}, + 
"BootMode": "uefi-preferred", + "PlatformDetails": "string", + "UsageOperation": "string", + "UsageOperationUpdateTime": datetime(2015, 1, 1), + "PrivateDnsNameOptions": { + "HostnameType": "resource-name", + "EnableResourceNameDnsARecord": False, + "EnableResourceNameDnsAAAARecord": False, + }, + "Ipv6Address": "string", + "TpmSupport": "string", + "MaintenanceOptions": {"AutoRecovery": "default"}, + "CurrentInstanceBootMode": "uefi", + }, + ], + "OwnerId": "string", + "RequesterId": "string", + "ReservationId": "string", + }, + ], + "NextToken": "string", + "ResponseMetadata": { + "RequestId": "string", + "HostId": "string", + "HTTPStatusCode": 404, + "HTTPHeaders": {}, + "RetryAttempts": 1, + }, +} diff --git a/source/remediation_runbooks/scripts/test/test_enable_minor_version_upgrade_rds.py b/source/remediation_runbooks/scripts/test/test_enable_minor_version_upgrade_rds.py index a6ed100c..4a6cbb1f 100644 --- a/source/remediation_runbooks/scripts/test/test_enable_minor_version_upgrade_rds.py +++ b/source/remediation_runbooks/scripts/test/test_enable_minor_version_upgrade_rds.py @@ -1,665 +1,609 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 """Test the functionality of the `enable_minor_version_upgrade_rds` remediation script""" - + from unittest.mock import patch + import boto3 -from botocore.stub import Stubber from botocore.config import Config +from botocore.stub import Stubber from enable_minor_version_upgrade_rds import lambda_handler - + + def test_enable_minor_version_upgrade_rds_cluster(mocker): - - BOTO_CONFIG = Config( - retries = { - 'mode': 'standard', - 'max_attempts': 10 - } - ) - rds = boto3.client('rds', config=BOTO_CONFIG) + BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + rds = boto3.client("rds", config=BOTO_CONFIG) stub_rds = Stubber(rds) - clients = { 'rds': rds } + clients = {"rds": rds} + + instanceId = "database-instance" + clusterId = "database-cluster" - instanceId = 'database-instance' - clusterId = 'database-cluster' - stub_rds.add_response( - 'describe_db_instances', - getDescribedClusterInstance(), - { - "DBInstanceIdentifier": instanceId - } + "describe_db_instances", + getDescribedClusterInstance(), + {"DBInstanceIdentifier": instanceId}, ) stub_rds.add_response( - 'describe_db_clusters', - getDescribedMultiAZCluster(), - { - "DBClusterIdentifier": clusterId - } + "describe_db_clusters", + getDescribedMultiAZCluster(), + {"DBClusterIdentifier": clusterId}, ) stub_rds.add_response( - 'modify_db_cluster', - {}, - { - "DBClusterIdentifier": clusterId, - "AutoMinorVersionUpgrade": True - } - ) + "modify_db_cluster", + {}, + {"DBClusterIdentifier": clusterId, "AutoMinorVersionUpgrade": True}, + ) stub_rds.add_response( - 'describe_db_clusters', - getDescribedMultiAZClusterMinorVersionUpgrade(), - { - "DBClusterIdentifier": clusterId, - "MaxRecords":100 - } - ) - + "describe_db_clusters", + getDescribedMultiAZClusterMinorVersionUpgrade(), + {"DBClusterIdentifier": clusterId, "MaxRecords": 100}, + ) + stub_rds.activate() - - with patch('boto3.client', side_effect=lambda service, **_ : clients[service]): - event = { 
'DBInstanceIdentifier': instanceId } - response = lambda_handler(event, {}) - assert response == { - "AutoMinorVersionUpgrade": True - } + + with patch("boto3.client", side_effect=lambda service, **_: clients[service]): + event = {"DBInstanceIdentifier": instanceId} + response = lambda_handler(event, {}) + assert response == {"AutoMinorVersionUpgrade": True} + def test_enable_minor_version_upgrade_rds_instance(mocker): - - BOTO_CONFIG = Config( - retries = { - 'mode': 'standard', - 'max_attempts': 10 - } - ) - rds = boto3.client('rds', config=BOTO_CONFIG) + BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + rds = boto3.client("rds", config=BOTO_CONFIG) stub_rds = Stubber(rds) - clients = { 'rds': rds } + clients = {"rds": rds} + + instanceId = "database-instance" - instanceId = 'database-instance' - stub_rds.add_response( - 'describe_db_instances', - getDescribedInstance(), - { - "DBInstanceIdentifier": instanceId - } + "describe_db_instances", + getDescribedInstance(), + {"DBInstanceIdentifier": instanceId}, ) stub_rds.add_response( - 'modify_db_instance', - {}, - { - "DBInstanceIdentifier": instanceId, - "AutoMinorVersionUpgrade": True - } - ) + "modify_db_instance", + {}, + {"DBInstanceIdentifier": instanceId, "AutoMinorVersionUpgrade": True}, + ) stub_rds.add_response( - 'describe_db_instances', - getDescribedInstanceMinorVersionUpgrade(), - { - "DBInstanceIdentifier": instanceId, - "MaxRecords":100 - } + "describe_db_instances", + getDescribedInstanceMinorVersionUpgrade(), + {"DBInstanceIdentifier": instanceId, "MaxRecords": 100}, ) stub_rds.activate() - - with patch('boto3.client', side_effect=lambda service, **_ : clients[service]): - event = { 'DBInstanceIdentifier': instanceId } - response = lambda_handler(event, {}) - assert response == { - "AutoMinorVersionUpgrade": True -} + + with patch("boto3.client", side_effect=lambda service, **_: clients[service]): + event = {"DBInstanceIdentifier": instanceId} + response = 
lambda_handler(event, {}) + assert response == {"AutoMinorVersionUpgrade": True} + def getDescribedClusterInstance(): - return { - 'DBInstances': - [{ - 'DBInstanceIdentifier': 'database-instance', - 'DBClusterIdentifier': 'database-cluster', - 'DBInstanceClass': 'db.r6g.2xlarge', - 'Engine': 'aurora-mysql', - 'DBInstanceStatus': 'available', - 'MasterUsername': 'admin', - 'Endpoint': - { - 'Address': 'test.amazonaws.com', - 'Port': 321, - 'HostedZoneId': 'test' - }, - 'AllocatedStorage': 1, - 'PreferredBackupWindow': '09:05-09:35', - 'BackupRetentionPeriod': 1, - 'DBSecurityGroups': [], - 'VpcSecurityGroups': - [{ - 'VpcSecurityGroupId': - 'sg-', - 'Status': 'active' - }], - 'DBParameterGroups': - [{ - 'DBParameterGroupName': 'default.aurora-mysql5.7', - 'ParameterApplyStatus': 'in-sync' - }], - 'AvailabilityZone': 'us-east-1a', - 'DBSubnetGroup': - { - 'DBSubnetGroupName': 'default-vpc-', - 'DBSubnetGroupDescription': 'Created from the RDS Management Console', - 'VpcId': 'vpc-', - 'SubnetGroupStatus': 'Complete', - 'Subnets': - [{ - 'SubnetIdentifier': 'subnet-', - 'SubnetAvailabilityZone': {'Name': 'us-east-1c'}, - 'SubnetOutpost': {}, - 'SubnetStatus': 'Active' - }, - { - 'SubnetIdentifier': - 'subnet-', - 'SubnetAvailabilityZone': {'Name': 'us-east-1f'}, - 'SubnetOutpost': {}, - 'SubnetStatus': 'Active' - }, - { - 'SubnetIdentifier': 'subnet-', - 'SubnetAvailabilityZone': {'Name': 'us-east-1b'}, - 'SubnetOutpost': {}, - 'SubnetStatus': 'Active' - }, - { - 'SubnetIdentifier': 'subnet-', - 'SubnetAvailabilityZone': {'Name': 'us-east-1a'}, - 'SubnetOutpost': {}, - 'SubnetStatus': 'Active' - }, - { - 'SubnetIdentifier': 'subnet-', - 'SubnetAvailabilityZone': {'Name': 'us-east-1e'}, - 'SubnetOutpost': {}, - 'SubnetStatus': 'Active' - }, + return { + "DBInstances": [ { - 'SubnetIdentifier': 'subnet-', - 'SubnetAvailabilityZone': {'Name': 'us-east-1d'}, - 'SubnetOutpost': {}, 'SubnetStatus': 'Active' + "DBInstanceIdentifier": "database-instance", + 
"DBClusterIdentifier": "database-cluster", + "DBInstanceClass": "db.r6g.2xlarge", + "Engine": "aurora-mysql", + "DBInstanceStatus": "available", + "MasterUsername": "admin", + "Endpoint": { + "Address": "test.amazonaws.com", + "Port": 321, + "HostedZoneId": "test", + }, + "AllocatedStorage": 1, + "PreferredBackupWindow": "09:05-09:35", + "BackupRetentionPeriod": 1, + "DBSecurityGroups": [], + "VpcSecurityGroups": [ + {"VpcSecurityGroupId": "sg-", "Status": "active"} + ], + "DBParameterGroups": [ + { + "DBParameterGroupName": "default.aurora-mysql5.7", + "ParameterApplyStatus": "in-sync", + } + ], + "AvailabilityZone": "us-east-1a", + "DBSubnetGroup": { + "DBSubnetGroupName": "default-vpc-", + "DBSubnetGroupDescription": "Created from the RDS Management Console", + "VpcId": "vpc-", + "SubnetGroupStatus": "Complete", + "Subnets": [ + { + "SubnetIdentifier": "subnet-", + "SubnetAvailabilityZone": {"Name": "us-east-1c"}, + "SubnetOutpost": {}, + "SubnetStatus": "Active", + }, + { + "SubnetIdentifier": "subnet-", + "SubnetAvailabilityZone": {"Name": "us-east-1f"}, + "SubnetOutpost": {}, + "SubnetStatus": "Active", + }, + { + "SubnetIdentifier": "subnet-", + "SubnetAvailabilityZone": {"Name": "us-east-1b"}, + "SubnetOutpost": {}, + "SubnetStatus": "Active", + }, + { + "SubnetIdentifier": "subnet-", + "SubnetAvailabilityZone": {"Name": "us-east-1a"}, + "SubnetOutpost": {}, + "SubnetStatus": "Active", + }, + { + "SubnetIdentifier": "subnet-", + "SubnetAvailabilityZone": {"Name": "us-east-1e"}, + "SubnetOutpost": {}, + "SubnetStatus": "Active", + }, + { + "SubnetIdentifier": "subnet-", + "SubnetAvailabilityZone": {"Name": "us-east-1d"}, + "SubnetOutpost": {}, + "SubnetStatus": "Active", + }, + ], + }, + "PreferredMaintenanceWindow": "sun:07:48-sun:08:18", + "PendingModifiedValues": {}, + "MultiAZ": False, + "EngineVersion": "5.7.mysql_aurora.2.10.2", + "AutoMinorVersionUpgrade": False, + "ReadReplicaDBInstanceIdentifiers": [], + "LicenseModel": "general-public-license", + 
"OptionGroupMemberships": [ + {"OptionGroupName": "default:aurora-mysql-5-7", "Status": "in-sync"} + ], + "PubliclyAccessible": False, + "StorageType": "aurora", + "DbInstancePort": 0, + "StorageEncrypted": False, + "DbiResourceId": "db-", + "CACertificateIdentifier": "rds-ca-2019", + "DomainMemberships": [], + "CopyTagsToSnapshot": False, + "MonitoringInterval": 60, + "EnhancedMonitoringResourceArn": "", + "MonitoringRoleArn": "", + "PromotionTier": 1, + "DBInstanceArn": "", + "IAMDatabaseAuthenticationEnabled": False, + "PerformanceInsightsEnabled": False, + "DeletionProtection": False, + "AssociatedRoles": [], + "TagList": [], + "CustomerOwnedIpEnabled": False, + "BackupTarget": "region", } - ] - }, - 'PreferredMaintenanceWindow': 'sun:07:48-sun:08:18', - 'PendingModifiedValues': {}, - 'MultiAZ': False, - 'EngineVersion': '5.7.mysql_aurora.2.10.2', - 'AutoMinorVersionUpgrade': False, - 'ReadReplicaDBInstanceIdentifiers': [], - 'LicenseModel': 'general-public-license', - 'OptionGroupMemberships': - [{ - 'OptionGroupName': 'default:aurora-mysql-5-7', - 'Status': 'in-sync' - }], - 'PubliclyAccessible': False, - 'StorageType': 'aurora', - 'DbInstancePort': 0, - 'StorageEncrypted': False, - 'DbiResourceId': 'db-', - 'CACertificateIdentifier': 'rds-ca-2019', - 'DomainMemberships': [], - 'CopyTagsToSnapshot': False, - 'MonitoringInterval': 60, - 'EnhancedMonitoringResourceArn': '', - 'MonitoringRoleArn': '', - 'PromotionTier': 1, - 'DBInstanceArn': '', - 'IAMDatabaseAuthenticationEnabled': False, - 'PerformanceInsightsEnabled': False, - 'DeletionProtection': False, - 'AssociatedRoles': [], - 'TagList': [], - 'CustomerOwnedIpEnabled': False, - 'BackupTarget': 'region' - }], - 'ResponseMetadata': - {'RequestId': '319d76ec-75e9-4030-9c4c-a5b648c0186e', - 'HTTPStatusCode': 200, - 'HTTPHeaders': - { - 'x-amzn-requestid': '319d76ec-75e9-4030-9c4c-a5b648c0186e', - 'strict-transport-security': 'max-age=31536000', - 'content-type': 'text/xml', - 'content-length': '6206', - 
'date': 'Wed, 25 Jan 2023 22:48:55 GMT' - }, - 'RetryAttempts': 0 - }} + ], + "ResponseMetadata": { + "RequestId": "319d76ec-75e9-4030-9c4c-a5b648c0186e", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "x-amzn-requestid": "319d76ec-75e9-4030-9c4c-a5b648c0186e", + "strict-transport-security": "max-age=31536000", + "content-type": "text/xml", + "content-length": "6206", + "date": "Wed, 25 Jan 2023 22:48:55 GMT", + }, + "RetryAttempts": 0, + }, + } + def getDescribedInstanceMinorVersionUpgrade(): return { - 'DBInstances': - [{ - 'DBInstanceIdentifier': 'database-instance', - 'DBInstanceClass': 'db.r6g.2xlarge', - 'Engine': 'aurora-mysql', - 'DBInstanceStatus': 'available', - 'MasterUsername': 'admin', - 'Endpoint': - { - 'Address': 'test.amazonaws.com', - 'Port': 321, - 'HostedZoneId': 'test' - }, - 'AllocatedStorage': 1, - 'PreferredBackupWindow': '09:05-09:35', - 'BackupRetentionPeriod': 1, - 'DBSecurityGroups': [], - 'VpcSecurityGroups': - [{ - 'VpcSecurityGroupId': - 'sg-', - 'Status': 'active' - }], - 'DBParameterGroups': - [{ - 'DBParameterGroupName': 'default.aurora-mysql5.7', - 'ParameterApplyStatus': 'in-sync' - }], - 'AvailabilityZone': 'us-east-1a', - 'DBSubnetGroup': - { - 'DBSubnetGroupName': 'default-vpc-', - 'DBSubnetGroupDescription': 'Created from the RDS Management Console', - 'VpcId': 'vpc-', - 'SubnetGroupStatus': 'Complete', - 'Subnets': - [{ - 'SubnetIdentifier': 'subnet-', - 'SubnetAvailabilityZone': {'Name': 'us-east-1c'}, - 'SubnetOutpost': {}, - 'SubnetStatus': 'Active' - }, - { - 'SubnetIdentifier': - 'subnet-', - 'SubnetAvailabilityZone': {'Name': 'us-east-1f'}, - 'SubnetOutpost': {}, - 'SubnetStatus': 'Active' - }, - { - 'SubnetIdentifier': 'subnet-', - 'SubnetAvailabilityZone': {'Name': 'us-east-1b'}, - 'SubnetOutpost': {}, - 'SubnetStatus': 'Active' - }, - { - 'SubnetIdentifier': 'subnet-', - 'SubnetAvailabilityZone': {'Name': 'us-east-1a'}, - 'SubnetOutpost': {}, - 'SubnetStatus': 'Active' - }, + "DBInstances": [ { - 'SubnetIdentifier': 
'subnet-', - 'SubnetAvailabilityZone': {'Name': 'us-east-1e'}, - 'SubnetOutpost': {}, - 'SubnetStatus': 'Active' - }, - { - 'SubnetIdentifier': 'subnet-', - 'SubnetAvailabilityZone': {'Name': 'us-east-1d'}, - 'SubnetOutpost': {}, 'SubnetStatus': 'Active' + "DBInstanceIdentifier": "database-instance", + "DBInstanceClass": "db.r6g.2xlarge", + "Engine": "aurora-mysql", + "DBInstanceStatus": "available", + "MasterUsername": "admin", + "Endpoint": { + "Address": "test.amazonaws.com", + "Port": 321, + "HostedZoneId": "test", + }, + "AllocatedStorage": 1, + "PreferredBackupWindow": "09:05-09:35", + "BackupRetentionPeriod": 1, + "DBSecurityGroups": [], + "VpcSecurityGroups": [ + {"VpcSecurityGroupId": "sg-", "Status": "active"} + ], + "DBParameterGroups": [ + { + "DBParameterGroupName": "default.aurora-mysql5.7", + "ParameterApplyStatus": "in-sync", + } + ], + "AvailabilityZone": "us-east-1a", + "DBSubnetGroup": { + "DBSubnetGroupName": "default-vpc-", + "DBSubnetGroupDescription": "Created from the RDS Management Console", + "VpcId": "vpc-", + "SubnetGroupStatus": "Complete", + "Subnets": [ + { + "SubnetIdentifier": "subnet-", + "SubnetAvailabilityZone": {"Name": "us-east-1c"}, + "SubnetOutpost": {}, + "SubnetStatus": "Active", + }, + { + "SubnetIdentifier": "subnet-", + "SubnetAvailabilityZone": {"Name": "us-east-1f"}, + "SubnetOutpost": {}, + "SubnetStatus": "Active", + }, + { + "SubnetIdentifier": "subnet-", + "SubnetAvailabilityZone": {"Name": "us-east-1b"}, + "SubnetOutpost": {}, + "SubnetStatus": "Active", + }, + { + "SubnetIdentifier": "subnet-", + "SubnetAvailabilityZone": {"Name": "us-east-1a"}, + "SubnetOutpost": {}, + "SubnetStatus": "Active", + }, + { + "SubnetIdentifier": "subnet-", + "SubnetAvailabilityZone": {"Name": "us-east-1e"}, + "SubnetOutpost": {}, + "SubnetStatus": "Active", + }, + { + "SubnetIdentifier": "subnet-", + "SubnetAvailabilityZone": {"Name": "us-east-1d"}, + "SubnetOutpost": {}, + "SubnetStatus": "Active", + }, + ], + }, + 
"PreferredMaintenanceWindow": "sun:07:48-sun:08:18", + "PendingModifiedValues": {}, + "MultiAZ": False, + "EngineVersion": "5.7.mysql_aurora.2.10.2", + "AutoMinorVersionUpgrade": True, + "ReadReplicaDBInstanceIdentifiers": [], + "LicenseModel": "general-public-license", + "OptionGroupMemberships": [ + {"OptionGroupName": "default:aurora-mysql-5-7", "Status": "in-sync"} + ], + "PubliclyAccessible": False, + "StorageType": "aurora", + "DbInstancePort": 0, + "StorageEncrypted": False, + "DbiResourceId": "db-", + "CACertificateIdentifier": "rds-ca-2019", + "DomainMemberships": [], + "CopyTagsToSnapshot": False, + "MonitoringInterval": 60, + "EnhancedMonitoringResourceArn": "", + "MonitoringRoleArn": "", + "PromotionTier": 1, + "DBInstanceArn": "", + "IAMDatabaseAuthenticationEnabled": False, + "PerformanceInsightsEnabled": False, + "DeletionProtection": False, + "AssociatedRoles": [], + "TagList": [], + "CustomerOwnedIpEnabled": False, + "BackupTarget": "region", } - ] - }, - 'PreferredMaintenanceWindow': 'sun:07:48-sun:08:18', - 'PendingModifiedValues': {}, - 'MultiAZ': False, - 'EngineVersion': '5.7.mysql_aurora.2.10.2', - 'AutoMinorVersionUpgrade': True, - 'ReadReplicaDBInstanceIdentifiers': [], - 'LicenseModel': 'general-public-license', - 'OptionGroupMemberships': - [{ - 'OptionGroupName': 'default:aurora-mysql-5-7', - 'Status': 'in-sync' - }], - 'PubliclyAccessible': False, - 'StorageType': 'aurora', - 'DbInstancePort': 0, - 'StorageEncrypted': False, - 'DbiResourceId': 'db-', - 'CACertificateIdentifier': 'rds-ca-2019', - 'DomainMemberships': [], - 'CopyTagsToSnapshot': False, - 'MonitoringInterval': 60, - 'EnhancedMonitoringResourceArn': '', - 'MonitoringRoleArn': '', - 'PromotionTier': 1, - 'DBInstanceArn': '', - 'IAMDatabaseAuthenticationEnabled': False, - 'PerformanceInsightsEnabled': False, - 'DeletionProtection': False, - 'AssociatedRoles': [], - 'TagList': [], - 'CustomerOwnedIpEnabled': False, - 'BackupTarget': 'region' - }], - 'ResponseMetadata': - 
{'RequestId': '319d76ec-75e9-4030-9c4c-a5b648c0186e', - 'HTTPStatusCode': 200, - 'HTTPHeaders': - { - 'x-amzn-requestid': '319d76ec-75e9-4030-9c4c-a5b648c0186e', - 'strict-transport-security': 'max-age=31536000', - 'content-type': 'text/xml', - 'content-length': '6206', - 'date': 'Wed, 25 Jan 2023 22:48:55 GMT' - }, - 'RetryAttempts': 0 - }} + ], + "ResponseMetadata": { + "RequestId": "319d76ec-75e9-4030-9c4c-a5b648c0186e", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "x-amzn-requestid": "319d76ec-75e9-4030-9c4c-a5b648c0186e", + "strict-transport-security": "max-age=31536000", + "content-type": "text/xml", + "content-length": "6206", + "date": "Wed, 25 Jan 2023 22:48:55 GMT", + }, + "RetryAttempts": 0, + }, + } + -def getDescribedInstance(): +def getDescribedInstance(): return { - 'DBInstances': - [ - { - 'DBInstanceIdentifier': 'database-instance', - 'DBInstanceClass': 'db.r6g.2xlarge', - 'Engine': 'aurora-mysql', - 'DBInstanceStatus': 'available', - 'MasterUsername': 'admin', - 'Endpoint': - { - 'Address': 'test.amazonaws.com', - 'Port': 321, - 'HostedZoneId': 'test' - }, - 'AllocatedStorage': 1, - 'PreferredBackupWindow': '09:05-09:35', - 'BackupRetentionPeriod': 1, - 'DBSecurityGroups': [], - 'VpcSecurityGroups': - [{ - 'VpcSecurityGroupId': - 'sg-', - 'Status': 'active' - }], - 'DBParameterGroups': - [{ - 'DBParameterGroupName': 'default.aurora-mysql5.7', - 'ParameterApplyStatus': 'in-sync' - }], - 'AvailabilityZone': 'us-east-1a', - 'DBSubnetGroup': - { - 'DBSubnetGroupName': 'default-vpc-', - 'DBSubnetGroupDescription': 'Created from the RDS Management Console', - 'VpcId': 'vpc-', - 'SubnetGroupStatus': 'Complete', - 'Subnets': - [{ - 'SubnetIdentifier': 'subnet-', - 'SubnetAvailabilityZone': {'Name': 'us-east-1c'}, - 'SubnetOutpost': {}, - 'SubnetStatus': 'Active' - }, - { - 'SubnetIdentifier': - 'subnet-', - 'SubnetAvailabilityZone': {'Name': 'us-east-1f'}, - 'SubnetOutpost': {}, - 'SubnetStatus': 'Active' - }, - { - 'SubnetIdentifier': 'subnet-', - 
'SubnetAvailabilityZone': {'Name': 'us-east-1b'}, - 'SubnetOutpost': {}, - 'SubnetStatus': 'Active' - }, + "DBInstances": [ { - 'SubnetIdentifier': 'subnet-', - 'SubnetAvailabilityZone': {'Name': 'us-east-1a'}, - 'SubnetOutpost': {}, - 'SubnetStatus': 'Active' - }, - { - 'SubnetIdentifier': 'subnet-', - 'SubnetAvailabilityZone': {'Name': 'us-east-1e'}, - 'SubnetOutpost': {}, - 'SubnetStatus': 'Active' - }, - { - 'SubnetIdentifier': 'subnet-', - 'SubnetAvailabilityZone': {'Name': 'us-east-1d'}, - 'SubnetOutpost': {}, 'SubnetStatus': 'Active' + "DBInstanceIdentifier": "database-instance", + "DBInstanceClass": "db.r6g.2xlarge", + "Engine": "aurora-mysql", + "DBInstanceStatus": "available", + "MasterUsername": "admin", + "Endpoint": { + "Address": "test.amazonaws.com", + "Port": 321, + "HostedZoneId": "test", + }, + "AllocatedStorage": 1, + "PreferredBackupWindow": "09:05-09:35", + "BackupRetentionPeriod": 1, + "DBSecurityGroups": [], + "VpcSecurityGroups": [ + {"VpcSecurityGroupId": "sg-", "Status": "active"} + ], + "DBParameterGroups": [ + { + "DBParameterGroupName": "default.aurora-mysql5.7", + "ParameterApplyStatus": "in-sync", + } + ], + "AvailabilityZone": "us-east-1a", + "DBSubnetGroup": { + "DBSubnetGroupName": "default-vpc-", + "DBSubnetGroupDescription": "Created from the RDS Management Console", + "VpcId": "vpc-", + "SubnetGroupStatus": "Complete", + "Subnets": [ + { + "SubnetIdentifier": "subnet-", + "SubnetAvailabilityZone": {"Name": "us-east-1c"}, + "SubnetOutpost": {}, + "SubnetStatus": "Active", + }, + { + "SubnetIdentifier": "subnet-", + "SubnetAvailabilityZone": {"Name": "us-east-1f"}, + "SubnetOutpost": {}, + "SubnetStatus": "Active", + }, + { + "SubnetIdentifier": "subnet-", + "SubnetAvailabilityZone": {"Name": "us-east-1b"}, + "SubnetOutpost": {}, + "SubnetStatus": "Active", + }, + { + "SubnetIdentifier": "subnet-", + "SubnetAvailabilityZone": {"Name": "us-east-1a"}, + "SubnetOutpost": {}, + "SubnetStatus": "Active", + }, + { + "SubnetIdentifier": 
"subnet-", + "SubnetAvailabilityZone": {"Name": "us-east-1e"}, + "SubnetOutpost": {}, + "SubnetStatus": "Active", + }, + { + "SubnetIdentifier": "subnet-", + "SubnetAvailabilityZone": {"Name": "us-east-1d"}, + "SubnetOutpost": {}, + "SubnetStatus": "Active", + }, + ], + }, + "PreferredMaintenanceWindow": "sun:07:48-sun:08:18", + "PendingModifiedValues": {}, + "MultiAZ": False, + "EngineVersion": "5.7.mysql_aurora.2.10.2", + "AutoMinorVersionUpgrade": False, + "ReadReplicaDBInstanceIdentifiers": [], + "LicenseModel": "general-public-license", + "OptionGroupMemberships": [ + {"OptionGroupName": "default:aurora-mysql-5-7", "Status": "in-sync"} + ], + "PubliclyAccessible": False, + "StorageType": "aurora", + "DbInstancePort": 0, + "StorageEncrypted": False, + "DbiResourceId": "db-", + "CACertificateIdentifier": "rds-ca-2019", + "DomainMemberships": [], + "CopyTagsToSnapshot": False, + "MonitoringInterval": 60, + "EnhancedMonitoringResourceArn": "", + "MonitoringRoleArn": "", + "PromotionTier": 1, + "DBInstanceArn": "", + "IAMDatabaseAuthenticationEnabled": False, + "PerformanceInsightsEnabled": False, + "DeletionProtection": False, + "AssociatedRoles": [], + "TagList": [], + "CustomerOwnedIpEnabled": False, + "BackupTarget": "region", } - ] - }, - 'PreferredMaintenanceWindow': 'sun:07:48-sun:08:18', - 'PendingModifiedValues': {}, - 'MultiAZ': False, - 'EngineVersion': '5.7.mysql_aurora.2.10.2', - 'AutoMinorVersionUpgrade': False, - 'ReadReplicaDBInstanceIdentifiers': [], - 'LicenseModel': 'general-public-license', - 'OptionGroupMemberships': - [{ - 'OptionGroupName': 'default:aurora-mysql-5-7', - 'Status': 'in-sync' - }], - 'PubliclyAccessible': False, - 'StorageType': 'aurora', - 'DbInstancePort': 0, - 'StorageEncrypted': False, - 'DbiResourceId': 'db-', - 'CACertificateIdentifier': 'rds-ca-2019', - 'DomainMemberships': [], - 'CopyTagsToSnapshot': False, - 'MonitoringInterval': 60, - 'EnhancedMonitoringResourceArn': '', - 'MonitoringRoleArn': '', - 'PromotionTier': 1, 
- 'DBInstanceArn': '', - 'IAMDatabaseAuthenticationEnabled': False, - 'PerformanceInsightsEnabled': False, - 'DeletionProtection': False, - 'AssociatedRoles': [], - 'TagList': [], - 'CustomerOwnedIpEnabled': False, - 'BackupTarget': 'region' - }], - 'ResponseMetadata': - {'RequestId': '319d76ec-75e9-4030-9c4c-a5b648c0186e', - 'HTTPStatusCode': 200, - 'HTTPHeaders': - { - 'x-amzn-requestid': '319d76ec-75e9-4030-9c4c-a5b648c0186e', - 'strict-transport-security': 'max-age=31536000', - 'content-type': 'text/xml', - 'content-length': '6206', - 'date': 'Wed, 25 Jan 2023 22:48:55 GMT' - }, - 'RetryAttempts': 0 - }} + ], + "ResponseMetadata": { + "RequestId": "319d76ec-75e9-4030-9c4c-a5b648c0186e", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "x-amzn-requestid": "319d76ec-75e9-4030-9c4c-a5b648c0186e", + "strict-transport-security": "max-age=31536000", + "content-type": "text/xml", + "content-length": "6206", + "date": "Wed, 25 Jan 2023 22:48:55 GMT", + }, + "RetryAttempts": 0, + }, + } + def getDescribedMultiAZCluster(): return { - 'DBClusters': - [ - { - 'AllocatedStorage': 400, - 'AvailabilityZones': ['us-east-1a', 'us-east-1d', 'us-east-1f'], - 'BackupRetentionPeriod': 7, - 'DBClusterIdentifier': 'database-cluster', - 'DBClusterParameterGroup': 'default.postgres13', - 'DBSubnetGroup': 'default-vpc-', - 'Status': 'available', - 'Endpoint': '', - 'ReaderEndpoint': '', - 'MultiAZ': True, - 'Engine': 'postgres', - 'EngineVersion': '13.7', - 'Port': 5432, - 'MasterUsername': 'postgres', - 'PreferredBackupWindow': '08:46-09:16', - 'PreferredMaintenanceWindow': 'thu:09:24-thu:09:54', - 'ReadReplicaIdentifiers': [], - 'DBClusterMembers': - [{ - 'DBInstanceIdentifier': 'database-3-instance-1', - 'IsClusterWriter': True, - 'DBClusterParameterGroupStatus': 'in-sync', - 'PromotionTier': 1 - }, - { - 'DBInstanceIdentifier': 'database-3-instance-2', - 'IsClusterWriter': False, - 'DBClusterParameterGroupStatus': 'in-sync', - 'PromotionTier': 1 - }, - { - 'DBInstanceIdentifier': 
'database-3-instance-3', - 'IsClusterWriter': False, - 'DBClusterParameterGroupStatus': 'in-sync', - 'PromotionTier': 1 - } - ], - 'VpcSecurityGroups': - [ - { - 'VpcSecurityGroupId': 'sg-', - 'Status': 'active' - } - ], - 'HostedZoneId': '', - 'StorageEncrypted': True, - 'KmsKeyId': '', - 'DbClusterResourceId': '', - 'DBClusterArn': '', - 'AssociatedRoles': [], - 'IAMDatabaseAuthenticationEnabled': False, - 'EngineMode': 'provisioned', - 'DeletionProtection': False, - 'HttpEndpointEnabled': False, - 'ActivityStreamStatus': 'stopped', - 'CopyTagsToSnapshot': False, - 'CrossAccountClone': False, - 'DomainMemberships': [], - 'TagList': [], - 'DBClusterInstanceClass': 'db.m5d.large', - 'StorageType': 'io1', - 'Iops': 3000, - 'PubliclyAccessible': False, - 'AutoMinorVersionUpgrade': False, - 'MonitoringInterval': 0, - 'PerformanceInsightsEnabled': False - } - ], - 'ResponseMetadata': + "DBClusters": [ { - 'RequestId': '', - 'HTTPStatusCode': 200, - 'HTTPHeaders': { - 'x-amzn-requestid': '9', - 'strict-transport-security': 'max-age=31536000', - 'content-type': 'text/xml', - 'content-length': '4369', - 'date': 'Wed, 25 Jan 2023 22:57:06 GMT' - }, - 'RetryAttempts': 0 + "AllocatedStorage": 400, + "AvailabilityZones": ["us-east-1a", "us-east-1d", "us-east-1f"], + "BackupRetentionPeriod": 7, + "DBClusterIdentifier": "database-cluster", + "DBClusterParameterGroup": "default.postgres13", + "DBSubnetGroup": "default-vpc-", + "Status": "available", + "Endpoint": "", + "ReaderEndpoint": "", + "MultiAZ": True, + "Engine": "postgres", + "EngineVersion": "13.7", + "Port": 5432, + "MasterUsername": "postgres", + "PreferredBackupWindow": "08:46-09:16", + "PreferredMaintenanceWindow": "thu:09:24-thu:09:54", + "ReadReplicaIdentifiers": [], + "DBClusterMembers": [ + { + "DBInstanceIdentifier": "database-3-instance-1", + "IsClusterWriter": True, + "DBClusterParameterGroupStatus": "in-sync", + "PromotionTier": 1, + }, + { + "DBInstanceIdentifier": "database-3-instance-2", + 
"IsClusterWriter": False, + "DBClusterParameterGroupStatus": "in-sync", + "PromotionTier": 1, + }, + { + "DBInstanceIdentifier": "database-3-instance-3", + "IsClusterWriter": False, + "DBClusterParameterGroupStatus": "in-sync", + "PromotionTier": 1, + }, + ], + "VpcSecurityGroups": [ + {"VpcSecurityGroupId": "sg-", "Status": "active"} + ], + "HostedZoneId": "", + "StorageEncrypted": True, + "KmsKeyId": "", + "DbClusterResourceId": "", + "DBClusterArn": "", + "AssociatedRoles": [], + "IAMDatabaseAuthenticationEnabled": False, + "EngineMode": "provisioned", + "DeletionProtection": False, + "HttpEndpointEnabled": False, + "ActivityStreamStatus": "stopped", + "CopyTagsToSnapshot": False, + "CrossAccountClone": False, + "DomainMemberships": [], + "TagList": [], + "DBClusterInstanceClass": "db.m5d.large", + "StorageType": "io1", + "Iops": 3000, + "PubliclyAccessible": False, + "AutoMinorVersionUpgrade": False, + "MonitoringInterval": 0, + "PerformanceInsightsEnabled": False, } - } + ], + "ResponseMetadata": { + "RequestId": "", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "x-amzn-requestid": "9", + "strict-transport-security": "max-age=31536000", + "content-type": "text/xml", + "content-length": "4369", + "date": "Wed, 25 Jan 2023 22:57:06 GMT", + }, + "RetryAttempts": 0, + }, + } -def getDescribedMultiAZClusterMinorVersionUpgrade(): + +def getDescribedMultiAZClusterMinorVersionUpgrade(): return { - 'DBClusters': - [ - { - 'AllocatedStorage': 400, - 'AvailabilityZones': ['us-east-1a', 'us-east-1d', 'us-east-1f'], - 'BackupRetentionPeriod': 7, - 'DBClusterIdentifier': 'database-cluster', - 'DBClusterParameterGroup': 'default.postgres13', - 'DBSubnetGroup': 'default-vpc-', - 'Status': 'available', - 'Endpoint': '', - 'ReaderEndpoint': '', - 'MultiAZ': True, - 'Engine': 'postgres', - 'EngineVersion': '13.7', - 'Port': 5432, - 'MasterUsername': 'postgres', - 'PreferredBackupWindow': '08:46-09:16', - 'PreferredMaintenanceWindow': 'thu:09:24-thu:09:54', - 
'ReadReplicaIdentifiers': [], - 'DBClusterMembers': - [{ - 'DBInstanceIdentifier': 'database-3-instance-1', - 'IsClusterWriter': True, - 'DBClusterParameterGroupStatus': 'in-sync', - 'PromotionTier': 1 - }, - { - 'DBInstanceIdentifier': 'database-3-instance-2', - 'IsClusterWriter': False, - 'DBClusterParameterGroupStatus': 'in-sync', - 'PromotionTier': 1 - }, - { - 'DBInstanceIdentifier': 'database-3-instance-3', - 'IsClusterWriter': False, - 'DBClusterParameterGroupStatus': 'in-sync', - 'PromotionTier': 1 - } - ], - 'VpcSecurityGroups': - [ - { - 'VpcSecurityGroupId': 'sg-', - 'Status': 'active' - } - ], - 'HostedZoneId': '', - 'StorageEncrypted': True, - 'KmsKeyId': '', - 'DbClusterResourceId': '', - 'DBClusterArn': '', - 'AssociatedRoles': [], - 'IAMDatabaseAuthenticationEnabled': False, - 'EngineMode': 'provisioned', - 'DeletionProtection': False, - 'HttpEndpointEnabled': False, - 'ActivityStreamStatus': 'stopped', - 'CopyTagsToSnapshot': False, - 'CrossAccountClone': False, - 'DomainMemberships': [], - 'TagList': [], - 'DBClusterInstanceClass': 'db.m5d.large', - 'StorageType': 'io1', - 'Iops': 3000, - 'PubliclyAccessible': False, - 'AutoMinorVersionUpgrade': True, - 'MonitoringInterval': 0, - 'PerformanceInsightsEnabled': False - }], - 'ResponseMetadata': + "DBClusters": [ { - 'RequestId': '', - 'HTTPStatusCode': 200, - 'HTTPHeaders': { - 'x-amzn-requestid': '9', - 'strict-transport-security': 'max-age=31536000', - 'content-type': 'text/xml', - 'content-length': '4369', - 'date': 'Wed, 25 Jan 2023 22:57:06 GMT' - }, - 'RetryAttempts': 0 + "AllocatedStorage": 400, + "AvailabilityZones": ["us-east-1a", "us-east-1d", "us-east-1f"], + "BackupRetentionPeriod": 7, + "DBClusterIdentifier": "database-cluster", + "DBClusterParameterGroup": "default.postgres13", + "DBSubnetGroup": "default-vpc-", + "Status": "available", + "Endpoint": "", + "ReaderEndpoint": "", + "MultiAZ": True, + "Engine": "postgres", + "EngineVersion": "13.7", + "Port": 5432, + "MasterUsername": 
"postgres", + "PreferredBackupWindow": "08:46-09:16", + "PreferredMaintenanceWindow": "thu:09:24-thu:09:54", + "ReadReplicaIdentifiers": [], + "DBClusterMembers": [ + { + "DBInstanceIdentifier": "database-3-instance-1", + "IsClusterWriter": True, + "DBClusterParameterGroupStatus": "in-sync", + "PromotionTier": 1, + }, + { + "DBInstanceIdentifier": "database-3-instance-2", + "IsClusterWriter": False, + "DBClusterParameterGroupStatus": "in-sync", + "PromotionTier": 1, + }, + { + "DBInstanceIdentifier": "database-3-instance-3", + "IsClusterWriter": False, + "DBClusterParameterGroupStatus": "in-sync", + "PromotionTier": 1, + }, + ], + "VpcSecurityGroups": [ + {"VpcSecurityGroupId": "sg-", "Status": "active"} + ], + "HostedZoneId": "", + "StorageEncrypted": True, + "KmsKeyId": "", + "DbClusterResourceId": "", + "DBClusterArn": "", + "AssociatedRoles": [], + "IAMDatabaseAuthenticationEnabled": False, + "EngineMode": "provisioned", + "DeletionProtection": False, + "HttpEndpointEnabled": False, + "ActivityStreamStatus": "stopped", + "CopyTagsToSnapshot": False, + "CrossAccountClone": False, + "DomainMemberships": [], + "TagList": [], + "DBClusterInstanceClass": "db.m5d.large", + "StorageType": "io1", + "Iops": 3000, + "PubliclyAccessible": False, + "AutoMinorVersionUpgrade": True, + "MonitoringInterval": 0, + "PerformanceInsightsEnabled": False, } - } - - - + ], + "ResponseMetadata": { + "RequestId": "", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "x-amzn-requestid": "9", + "strict-transport-security": "max-age=31536000", + "content-type": "text/xml", + "content-length": "4369", + "date": "Wed, 25 Jan 2023 22:57:06 GMT", + }, + "RetryAttempts": 0, + }, + } diff --git a/source/remediation_runbooks/scripts/test/test_enableautoscalinggroupelbhealthcheck.py b/source/remediation_runbooks/scripts/test/test_enableautoscalinggroupelbhealthcheck.py index 580c7a46..ba51f58b 100644 --- a/source/remediation_runbooks/scripts/test/test_enableautoscalinggroupelbhealthcheck.py +++ 
b/source/remediation_runbooks/scripts/test/test_enableautoscalinggroupelbhealthcheck.py @@ -1,27 +1,24 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 import boto3 -import json import botocore.session -from botocore.stub import Stubber -from botocore.config import Config -import pytest -from pytest_mock import mocker - import EnableAutoScalingGroupELBHealthCheck_validate as validate +from botocore.config import Config +from botocore.stub import Stubber my_session = boto3.session.Session() my_region = my_session.region_name -#===================================================================================== + +# ===================================================================================== # EnableAutoScalingGroupELBHealthCheck_remediation SUCCESS -#===================================================================================== +# ===================================================================================== def test_validation_success(mocker): event = { - 'SolutionId': 'SO0000', - 'SolutionVersion': '1.2.3', - 'AsgName': 'my_asg', - 'region': my_region + "SolutionId": "SO0000", + "SolutionVersion": "1.2.3", + "AsgName": "my_asg", + "region": my_region, } good_response = { "AutoScalingGroups": [ @@ -31,15 +28,13 @@ def test_validation_success(mocker): "LaunchTemplate": { "LaunchTemplateId": "lt-05ad2fca4f4ea7d2f", "LaunchTemplateName": "sharrtest", - "Version": "$Default" + "Version": "$Default", }, "MinSize": 0, "MaxSize": 1, "DesiredCapacity": 0, "DefaultCooldown": 300, - "AvailabilityZones": [ - "us-east-1b" - ], + "AvailabilityZones": ["us-east-1b"], "LoadBalancerNames": [], "TargetGroupARNs": [ "arn:aws:elasticloadbalancing:us-east-1:111111111111:targetgroup/WebDemoTarget/fc9a82512b92af62" @@ -52,47 +47,43 @@ def test_validation_success(mocker): "VPCZoneIdentifier": "subnet-86a594ab", "EnabledMetrics": [], "Tags": [], - "TerminationPolicies": [ - "Default" - ], + 
"TerminationPolicies": ["Default"], "NewInstancesProtectedFromScaleIn": False, - "ServiceLinkedRoleARN": "arn:aws:iam::111111111111:role/aws-service-role/autoscaling.amazonaws.com/AWSServiceRoleForAutoScaling" + "ServiceLinkedRoleARN": "arn:aws:iam::111111111111:role/aws-service-role/autoscaling.amazonaws.com/AWSServiceRoleForAutoScaling", } ] } - BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) + asg_client = botocore.session.get_session().create_client( + "autoscaling", config=BOTO_CONFIG ) - asg_client = botocore.session.get_session().create_client('autoscaling', config=BOTO_CONFIG) asg_stubber = Stubber(asg_client) - asg_stubber.add_response( - 'describe_auto_scaling_groups', - good_response - ) + asg_stubber.add_response("describe_auto_scaling_groups", good_response) asg_stubber.activate() - mocker.patch('EnableAutoScalingGroupELBHealthCheck_validate.connect_to_autoscaling', return_value=asg_client) + mocker.patch( + "EnableAutoScalingGroupELBHealthCheck_validate.connect_to_autoscaling", + return_value=asg_client, + ) assert validate.verify(event, {}) == { "response": { "message": "Autoscaling Group health check type updated to ELB", - "status": "Success" + "status": "Success", } } asg_stubber.deactivate() + def test_validation_failed(mocker): event = { - 'SolutionId': 'SO0000', - 'SolutionVersion': '1.2.3', - 'AsgName': 'my_asg', - 'region': my_region + "SolutionId": "SO0000", + "SolutionVersion": "1.2.3", + "AsgName": "my_asg", + "region": my_region, } bad_response = { "AutoScalingGroups": [ @@ -102,15 +93,13 @@ def test_validation_failed(mocker): "LaunchTemplate": { "LaunchTemplateId": "lt-05ad2fca4f4ea7d2f", "LaunchTemplateName": "sharrtest", - "Version": "$Default" + "Version": "$Default", }, "MinSize": 0, "MaxSize": 1, "DesiredCapacity": 0, "DefaultCooldown": 300, - "AvailabilityZones": [ - "us-east-1b" - ], + "AvailabilityZones": 
["us-east-1b"], "LoadBalancerNames": [], "TargetGroupARNs": [ "arn:aws:elasticloadbalancing:us-east-1:111111111111:targetgroup/WebDemoTarget/fc9a82512b92af62" @@ -123,36 +112,31 @@ def test_validation_failed(mocker): "VPCZoneIdentifier": "subnet-86a594ab", "EnabledMetrics": [], "Tags": [], - "TerminationPolicies": [ - "Default" - ], + "TerminationPolicies": ["Default"], "NewInstancesProtectedFromScaleIn": False, - "ServiceLinkedRoleARN": "arn:aws:iam::111111111111:role/aws-service-role/autoscaling.amazonaws.com/AWSServiceRoleForAutoScaling" + "ServiceLinkedRoleARN": "arn:aws:iam::111111111111:role/aws-service-role/autoscaling.amazonaws.com/AWSServiceRoleForAutoScaling", } ] } - BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) + asg_client = botocore.session.get_session().create_client( + "autoscaling", config=BOTO_CONFIG ) - asg_client = botocore.session.get_session().create_client('autoscaling', config=BOTO_CONFIG) asg_stubber = Stubber(asg_client) - asg_stubber.add_response( - 'describe_auto_scaling_groups', - bad_response - ) + asg_stubber.add_response("describe_auto_scaling_groups", bad_response) asg_stubber.activate() - mocker.patch('EnableAutoScalingGroupELBHealthCheck_validate.connect_to_autoscaling', return_value=asg_client) + mocker.patch( + "EnableAutoScalingGroupELBHealthCheck_validate.connect_to_autoscaling", + return_value=asg_client, + ) assert validate.verify(event, {}) == { "response": { "message": "Autoscaling Group health check type is not ELB", - "status": "Failed" + "status": "Failed", } } diff --git a/source/remediation_runbooks/scripts/test/test_enableawsconfig.py b/source/remediation_runbooks/scripts/test/test_enableawsconfig.py index 400b72db..75aca400 100644 --- a/source/remediation_runbooks/scripts/test/test_enableawsconfig.py +++ b/source/remediation_runbooks/scripts/test/test_enableawsconfig.py @@ -1,239 +1,197 @@ # Copyright 
Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import boto3 import json -import botocore.session -from botocore.stub import Stubber, ANY -from botocore.config import Config -import pytest -from pytest_mock import mocker +import boto3 +import botocore.session import EnableAWSConfig_createconfigbucket as createconfigbucket import EnableAWSConfig_createtopic as createtopic import EnableAWSConfig_enableconfig as enableconfig import EnableAWSConfig_summary as summary +from botocore.config import Config +from botocore.stub import ANY, Stubber my_session = boto3.session.Session() my_region = my_session.region_name + def test_create_config_bucket(mocker): event = { - 'kms_key_arn': f'arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab', - 'partition': 'aws', - 'account': '111111111111', - 'region': 'us-west-2', - 'logging_bucket': 'mahfakebukkit' + "kms_key_arn": "arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab", + "partition": "aws", + "account": "111111111111", + "region": "us-west-2", + "logging_bucket": "mahfakebukkit", } bucket = f'so0111-aws-config-{event["region"]}-{event["account"]}' bucket_policy = { "Version": "2012-10-17", "Statement": [ - { - "Sid": "AWSConfigBucketPermissionsCheck", - "Effect": "Allow", - "Principal": { - "Service": [ - "config.amazonaws.com" - ] + { + "Sid": "AWSConfigBucketPermissionsCheck", + "Effect": "Allow", + "Principal": {"Service": ["config.amazonaws.com"]}, + "Action": "s3:GetBucketAcl", + "Resource": f'arn:{event["partition"]}:s3:::{bucket}', }, - "Action": "s3:GetBucketAcl", - "Resource": f'arn:{event["partition"]}:s3:::{bucket}' - }, - { - "Sid": "AWSConfigBucketExistenceCheck", - "Effect": "Allow", - "Principal": { - "Service": [ - "config.amazonaws.com" - ] + { + "Sid": "AWSConfigBucketExistenceCheck", + "Effect": "Allow", + "Principal": {"Service": ["config.amazonaws.com"]}, + "Action": "s3:ListBucket", + "Resource": 
f'arn:{event["partition"]}:s3:::{bucket}', }, - "Action": "s3:ListBucket", - "Resource": f'arn:{event["partition"]}:s3:::{bucket}' - }, - { - "Sid": "AWSConfigBucketDelivery", - "Effect": "Allow", - "Principal": { - "Service": [ - "config.amazonaws.com" - ] + { + "Sid": "AWSConfigBucketDelivery", + "Effect": "Allow", + "Principal": {"Service": ["config.amazonaws.com"]}, + "Action": "s3:PutObject", + "Resource": f'arn:{event["partition"]}:s3:::{bucket}/*', + "Condition": { + "StringEquals": {"s3:x-amz-acl": "bucket-owner-full-control"} + }, }, - "Action": "s3:PutObject", - "Resource": f'arn:{event["partition"]}:s3:::{bucket}/*', - "Condition": { - "StringEquals": { - "s3:x-amz-acl": "bucket-owner-full-control" - } - } - } - ] + ], } - BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region - ) - s3 = botocore.session.get_session().create_client('s3', config=BOTO_CONFIG) + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) + s3 = botocore.session.get_session().create_client("s3", config=BOTO_CONFIG) s3_stubber = Stubber(s3) s3_stubber.add_response( - 'create_bucket', + "create_bucket", {}, { - 'ACL': 'private', - 'Bucket': bucket, - 'CreateBucketConfiguration': { - 'LocationConstraint': event["region"] - } - } + "ACL": "private", + "Bucket": bucket, + "CreateBucketConfiguration": {"LocationConstraint": event["region"]}, + }, ) s3_stubber.add_response( - 'put_bucket_encryption', + "put_bucket_encryption", {}, { - 'Bucket': bucket, - 'ServerSideEncryptionConfiguration': { - 'Rules': [ + "Bucket": bucket, + "ServerSideEncryptionConfiguration": { + "Rules": [ { - 'ApplyServerSideEncryptionByDefault': { - 'SSEAlgorithm': 'aws:kms', - 'KMSMasterKeyID': '1234abcd-12ab-34cd-56ef-1234567890ab' - } + "ApplyServerSideEncryptionByDefault": { + "SSEAlgorithm": "aws:kms", + "KMSMasterKeyID": "1234abcd-12ab-34cd-56ef-1234567890ab", + } } ] - } - } + }, + }, ) s3_stubber.add_response( - 'put_public_access_block', + 
"put_public_access_block", {}, { - 'Bucket': bucket, - 'PublicAccessBlockConfiguration': { - 'BlockPublicAcls': True, - 'IgnorePublicAcls': True, - 'BlockPublicPolicy': True, - 'RestrictPublicBuckets': True - } - } + "Bucket": bucket, + "PublicAccessBlockConfiguration": { + "BlockPublicAcls": True, + "IgnorePublicAcls": True, + "BlockPublicPolicy": True, + "RestrictPublicBuckets": True, + }, + }, ) s3_stubber.add_response( - 'put_bucket_logging', + "put_bucket_logging", {}, { - 'Bucket': bucket, - 'BucketLoggingStatus': { - 'LoggingEnabled': { - 'TargetBucket': event["logging_bucket"], - 'TargetPrefix': f'access-logs/{bucket}' - } - } - } + "Bucket": bucket, + "BucketLoggingStatus": { + "LoggingEnabled": { + "TargetBucket": event["logging_bucket"], + "TargetPrefix": f"access-logs/{bucket}", + } + }, + }, ) s3_stubber.add_response( - 'put_bucket_policy', - {}, - { - 'Bucket': bucket, - 'Policy': json.dumps(bucket_policy) - } + "put_bucket_policy", {}, {"Bucket": bucket, "Policy": json.dumps(bucket_policy)} ) s3_stubber.activate() - mocker.patch('EnableAWSConfig_createconfigbucket.connect_to_s3', return_value=s3) + mocker.patch("EnableAWSConfig_createconfigbucket.connect_to_s3", return_value=s3) createconfigbucket.create_encrypted_bucket(event, {}) s3_stubber.assert_no_pending_responses() s3_stubber.deactivate() + def test_bucket_already_exists(mocker): event = { - 'kms_key_arn': f'arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab', - 'partition': 'aws', - 'account': '111111111111', - 'region': 'us-west-2', - 'logging_bucket': 'mahfakebukkit' + "kms_key_arn": "arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab", + "partition": "aws", + "account": "111111111111", + "region": "us-west-2", + "logging_bucket": "mahfakebukkit", } - BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region - ) - s3 = botocore.session.get_session().create_client('s3', config=BOTO_CONFIG) + BOTO_CONFIG = 
Config(retries={"mode": "standard"}, region_name=my_region) + s3 = botocore.session.get_session().create_client("s3", config=BOTO_CONFIG) s3_stubber = Stubber(s3) - s3_stubber.add_client_error( - 'create_bucket', - 'BucketAlreadyExists' - ) + s3_stubber.add_client_error("create_bucket", "BucketAlreadyExists") s3_stubber.activate() - mocker.patch('EnableAWSConfig_createconfigbucket.connect_to_s3', return_value=s3) + mocker.patch("EnableAWSConfig_createconfigbucket.connect_to_s3", return_value=s3) createconfigbucket.create_encrypted_bucket(event, {}) s3_stubber.assert_no_pending_responses() s3_stubber.deactivate() + def test_create_topic(mocker): event = { - 'kms_key_arn': f'arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab', - 'topic_name': 'sharr-test' + "kms_key_arn": "arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab", + "topic_name": "sharr-test", } - BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region - ) - sns = botocore.session.get_session().create_client('sns', config=BOTO_CONFIG) - mocker.patch('EnableAWSConfig_createtopic.connect_to_sns', return_value=sns) + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) + sns = botocore.session.get_session().create_client("sns", config=BOTO_CONFIG) + mocker.patch("EnableAWSConfig_createtopic.connect_to_sns", return_value=sns) sns_stubber = Stubber(sns) - ssm = botocore.session.get_session().create_client('ssm', config=BOTO_CONFIG) - mocker.patch('EnableAWSConfig_createtopic.connect_to_ssm', return_value=ssm) + ssm = botocore.session.get_session().create_client("ssm", config=BOTO_CONFIG) + mocker.patch("EnableAWSConfig_createtopic.connect_to_ssm", return_value=ssm) ssm_stubber = Stubber(ssm) sns_stubber.add_response( - 'create_topic', + "create_topic", + {"TopicArn": "arn:aws:sns:us-west-2:111111111111:sharr-test"}, { - 'TopicArn': 'arn:aws:sns:us-west-2:111111111111:sharr-test' + "Name": event["topic_name"], + 
"Attributes": {"KmsMasterKeyId": event["kms_key_arn"].split("key/")[1]}, }, - { - 'Name': event["topic_name"], - 'Attributes': { - 'KmsMasterKeyId': event["kms_key_arn"].split('key/')[1] - } - } ) ssm_stubber.add_response( - 'put_parameter', + "put_parameter", {}, { - 'Name': '/Solutions/SO0111/SNS_Topic_Config.1', - 'Description': 'SNS Topic for AWS Config updates', - 'Type': 'String', - 'Overwrite': True, - 'Value': 'arn:aws:sns:us-west-2:111111111111:sharr-test' - } + "Name": "/Solutions/SO0111/SNS_Topic_Config.1", + "Description": "SNS Topic for AWS Config updates", + "Type": "String", + "Overwrite": True, + "Value": "arn:aws:sns:us-west-2:111111111111:sharr-test", + }, ) sns_stubber.add_response( - 'set_topic_attributes', + "set_topic_attributes", {}, { - 'TopicArn': 'arn:aws:sns:us-west-2:111111111111:sharr-test', - 'AttributeName': 'Policy', - 'AttributeValue': ANY - } + "TopicArn": "arn:aws:sns:us-west-2:111111111111:sharr-test", + "AttributeName": "Policy", + "AttributeValue": ANY, + }, ) sns_stubber.activate() @@ -246,45 +204,34 @@ def test_create_topic(mocker): sns_stubber.deactivate() ssm_stubber.deactivate() + def test_create_topic_already_exists(mocker): event = { - 'kms_key_arn': f'arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab', - 'topic_name': 'sharr-test' + "kms_key_arn": "arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab", + "topic_name": "sharr-test", } - BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region - ) - sns = botocore.session.get_session().create_client('sns', config=BOTO_CONFIG) - mocker.patch('EnableAWSConfig_createtopic.connect_to_sns', return_value=sns) + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) + sns = botocore.session.get_session().create_client("sns", config=BOTO_CONFIG) + mocker.patch("EnableAWSConfig_createtopic.connect_to_sns", return_value=sns) sns_stubber = Stubber(sns) - sns_stubber.add_client_error( - 
'create_topic', - 'InvalidParameter' - ) + sns_stubber.add_client_error("create_topic", "InvalidParameter") sns_stubber.add_response( - 'create_topic', - { - 'TopicArn': 'arn:aws:sns:us-west-2:111111111111:sharr-test' - }, - { - 'Name': event["topic_name"] - } + "create_topic", + {"TopicArn": "arn:aws:sns:us-west-2:111111111111:sharr-test"}, + {"Name": event["topic_name"]}, ) sns_stubber.add_response( - 'set_topic_attributes', + "set_topic_attributes", {}, { - 'TopicArn': 'arn:aws:sns:us-west-2:111111111111:sharr-test', - 'AttributeName': 'Policy', - 'AttributeValue': ANY - } + "TopicArn": "arn:aws:sns:us-west-2:111111111111:sharr-test", + "AttributeName": "Policy", + "AttributeValue": ANY, + }, ) sns_stubber.activate() @@ -294,62 +241,54 @@ def test_create_topic_already_exists(mocker): sns_stubber.assert_no_pending_responses() sns_stubber.deactivate() + def test_enable_config(mocker): event = { - 'partition': 'aws', - 'account': '111111111111', - 'config_bucket': 'mahfakebukkit', - 'topic_arn': 'arn:aws:sns:us-west-2:111111111111:sharr-test', - 'aws_service_role': 'foobarbaz' + "partition": "aws", + "account": "111111111111", + "config_bucket": "mahfakebukkit", + "topic_arn": "arn:aws:sns:us-west-2:111111111111:sharr-test", + "aws_service_role": "foobarbaz", } - BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region - ) - cfg = botocore.session.get_session().create_client('config', config=BOTO_CONFIG) - mocker.patch('EnableAWSConfig_enableconfig.connect_to_config', return_value=cfg) + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) + cfg = botocore.session.get_session().create_client("config", config=BOTO_CONFIG) + mocker.patch("EnableAWSConfig_enableconfig.connect_to_config", return_value=cfg) cfg_stubber = Stubber(cfg) cfg_stubber.add_response( - 'put_configuration_recorder', + "put_configuration_recorder", {}, { - 'ConfigurationRecorder': { - 'name': 'default', - 'roleARN': 
f'arn:aws:iam::{event["account"]}:role/{event["aws_service_role"]}', - 'recordingGroup': { - 'allSupported': True, - 'includeGlobalResourceTypes': True - } + "ConfigurationRecorder": { + "name": "default", + "roleARN": f'arn:aws:iam::{event["account"]}:role/{event["aws_service_role"]}', + "recordingGroup": { + "allSupported": True, + "includeGlobalResourceTypes": True, + }, } - } + }, ) cfg_stubber.add_response( - 'put_delivery_channel', + "put_delivery_channel", {}, { - 'DeliveryChannel': { - 'name': 'default', - 's3BucketName': event['config_bucket'], - 's3KeyPrefix': event['account'], - 'snsTopicARN': event['topic_arn'], - 'configSnapshotDeliveryProperties': { - 'deliveryFrequency': 'Twelve_Hours' - } + "DeliveryChannel": { + "name": "default", + "s3BucketName": event["config_bucket"], + "s3KeyPrefix": event["account"], + "snsTopicARN": event["topic_arn"], + "configSnapshotDeliveryProperties": { + "deliveryFrequency": "Twelve_Hours" + }, } - } + }, ) cfg_stubber.add_response( - 'start_configuration_recorder', - {}, - { - 'ConfigurationRecorderName': 'default' - } + "start_configuration_recorder", {}, {"ConfigurationRecorderName": "default"} ) cfg_stubber.activate() @@ -359,41 +298,32 @@ def test_enable_config(mocker): cfg_stubber.assert_no_pending_responses() cfg_stubber.deactivate() + def test_enable_config_already_enabled(mocker): event = { - 'partition': 'aws', - 'account': '111111111111', - 'config_bucket': 'mahfakebukkit', - 'topic_arn': 'arn:aws:sns:us-west-2:111111111111:sharr-test', - 'aws_service_role': 'foobarbaz' + "partition": "aws", + "account": "111111111111", + "config_bucket": "mahfakebukkit", + "topic_arn": "arn:aws:sns:us-west-2:111111111111:sharr-test", + "aws_service_role": "foobarbaz", } - BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region - ) - cfg = botocore.session.get_session().create_client('config', config=BOTO_CONFIG) - mocker.patch('EnableAWSConfig_enableconfig.connect_to_config', return_value=cfg) + 
BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) + cfg = botocore.session.get_session().create_client("config", config=BOTO_CONFIG) + mocker.patch("EnableAWSConfig_enableconfig.connect_to_config", return_value=cfg) cfg_stubber = Stubber(cfg) cfg_stubber.add_client_error( - 'put_configuration_recorder', - 'MaxNumberOfConfigurationRecordersExceededException' + "put_configuration_recorder", + "MaxNumberOfConfigurationRecordersExceededException", ) cfg_stubber.add_client_error( - 'put_delivery_channel', - 'MaxNumberOfDeliveryChannelsExceededException' + "put_delivery_channel", "MaxNumberOfDeliveryChannelsExceededException" ) cfg_stubber.add_response( - 'start_configuration_recorder', - {}, - { - 'ConfigurationRecorderName': 'default' - } + "start_configuration_recorder", {}, {"ConfigurationRecorderName": "default"} ) cfg_stubber.activate() @@ -403,16 +333,14 @@ def test_enable_config_already_enabled(mocker): cfg_stubber.assert_no_pending_responses() cfg_stubber.deactivate() + def test_summary(): event = { - 'config_bucket': 'mahfakebukkit', - 'logging_bucket': 'loggingbukkit', - 'sns_topic_arn': 'arn:aws:sns:us-west-2:111111111111:sharr-test', + "config_bucket": "mahfakebukkit", + "logging_bucket": "loggingbukkit", + "sns_topic_arn": "arn:aws:sns:us-west-2:111111111111:sharr-test", } assert summary.process_results(event, {}) == { - "response": { - "message": "AWS Config successfully enabled", - "status": "Success" - } + "response": {"message": "AWS Config successfully enabled", "status": "Success"} } diff --git a/source/remediation_runbooks/scripts/test/test_enablecloudtrailencryption.py b/source/remediation_runbooks/scripts/test/test_enablecloudtrailencryption.py index f5778b76..0b152424 100644 --- a/source/remediation_runbooks/scripts/test/test_enablecloudtrailencryption.py +++ b/source/remediation_runbooks/scripts/test/test_enablecloudtrailencryption.py @@ -2,57 +2,49 @@ # SPDX-License-Identifier: Apache-2.0 import boto3 import 
botocore.session -from botocore.stub import Stubber -from botocore.config import Config -import pytest -from pytest_mock import mocker - import EnableCloudTrailEncryption as validate +from botocore.config import Config +from botocore.stub import Stubber my_session = boto3.session.Session() my_region = my_session.region_name -#===================================================================================== + +# ===================================================================================== # EnableCloudTrailEncryption SUCCESS -#===================================================================================== +# ===================================================================================== def test_EnableCloudTrailEncryption_success(mocker): event = { - 'SolutionId': 'SO0000', - 'SolutionVersion': '1.2.3', - 'trail': 'foobarbaz', - 'trail_region': my_region, - 'exec_region': my_region, - 'kms_key_arn': f'arn:aws:kms:{my_region}:111111111111:key' + "SolutionId": "SO0000", + "SolutionVersion": "1.2.3", + "trail": "foobarbaz", + "trail_region": my_region, + "exec_region": my_region, + "kms_key_arn": f"arn:aws:kms:{my_region}:111111111111:key", } - BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region - ) + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) - ### LOGS - ct_client = botocore.session.get_session().create_client('cloudtrail', config=BOTO_CONFIG) + # LOGS + ct_client = botocore.session.get_session().create_client( + "cloudtrail", config=BOTO_CONFIG + ) ct_stubber = Stubber(ct_client) ct_stubber.add_response( - 'update_trail', - {}, - { - 'Name': event['trail'], - 'KmsKeyId': event['kms_key_arn'] - } + "update_trail", {}, {"Name": event["trail"], "KmsKeyId": event["kms_key_arn"]} ) ct_stubber.activate() - mocker.patch('EnableCloudTrailEncryption.connect_to_cloudtrail', return_value=ct_client) + mocker.patch( + "EnableCloudTrailEncryption.connect_to_cloudtrail", return_value=ct_client 
+ ) assert validate.enable_trail_encryption(event, {}) == { "response": { "message": f'Enabled KMS CMK encryption on {event["trail"]}', - "status": "Success" + "status": "Success", } } diff --git a/source/remediation_runbooks/scripts/test/test_enablecloudtrailtocloudwatchlogging.py b/source/remediation_runbooks/scripts/test/test_enablecloudtrailtocloudwatchlogging.py index a6da1c0b..8c8bb02a 100644 --- a/source/remediation_runbooks/scripts/test/test_enablecloudtrailtocloudwatchlogging.py +++ b/source/remediation_runbooks/scripts/test/test_enablecloudtrailtocloudwatchlogging.py @@ -1,35 +1,36 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 +from typing import Dict, List + import boto3 -import json import botocore.session -from botocore.stub import Stubber -from botocore.config import Config -import pytest -from pytest_mock import mocker - import EnableCloudTrailToCloudWatchLogging_waitforloggroup as validation +import pytest +from botocore.config import Config +from botocore.stub import Stubber my_session = boto3.session.Session() my_region = my_session.region_name + @pytest.fixture(autouse=True) def patch_sleep_between_attempts(mocker): - mocker.patch('EnableCloudTrailToCloudWatchLogging_waitforloggroup.sleep_between_attempts') + mocker.patch( + "EnableCloudTrailToCloudWatchLogging_waitforloggroup.sleep_between_attempts" + ) + -#===================================================================================== +# ===================================================================================== # EnableCloudTrailToCloudWatchLogging_waitforloggroup -#===================================================================================== +# ===================================================================================== def test_validation_success(mocker): event = { - 'SolutionId': 'SO0000', - 'SolutionVersion': '1.2.3', - 'LogGroup': 'my_loggroup', - 'region': my_region - } - notyet_response = { 
- "logGroups": [] + "SolutionId": "SO0000", + "SolutionVersion": "1.2.3", + "LogGroup": "my_loggroup", + "region": my_region, } + notyet_response: Dict[str, List[str]] = {"logGroups": []} good_response = { "logGroups": [ { @@ -37,91 +38,67 @@ def test_validation_success(mocker): "creationTime": 1576239692739, "metricFilterCount": 0, "arn": "arn:aws:logs:us-east-1:111111111111:log-group:my_loggroup:*", - "storedBytes": 109 + "storedBytes": 109, } ] } - BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) + cwl_client = botocore.session.get_session().create_client( + "logs", config=BOTO_CONFIG ) - cwl_client = botocore.session.get_session().create_client('logs', config=BOTO_CONFIG) cwl_stubber = Stubber(cwl_client) - cwl_stubber.add_response( - 'describe_log_groups', - notyet_response - ) - cwl_stubber.add_response( - 'describe_log_groups', - notyet_response - ) - cwl_stubber.add_response( - 'describe_log_groups', - good_response - ) + cwl_stubber.add_response("describe_log_groups", notyet_response) + cwl_stubber.add_response("describe_log_groups", notyet_response) + cwl_stubber.add_response("describe_log_groups", good_response) cwl_stubber.activate() - mocker.patch('EnableCloudTrailToCloudWatchLogging_waitforloggroup.connect_to_logs', return_value=cwl_client) + mocker.patch( + "EnableCloudTrailToCloudWatchLogging_waitforloggroup.connect_to_logs", + return_value=cwl_client, + ) - assert validation.wait_for_loggroup(event, {}) == "arn:aws:logs:us-east-1:111111111111:log-group:my_loggroup:*" + assert ( + validation.wait_for_loggroup(event, {}) + == "arn:aws:logs:us-east-1:111111111111:log-group:my_loggroup:*" + ) cwl_stubber.deactivate() + def test_validation_failed(mocker): event = { - 'SolutionId': 'SO0000', - 'SolutionVersion': '1.2.3', - 'LogGroup': 'my_loggroup', - 'region': my_region - } - notyet_response = { - "logGroups": [] - } - good_response = { - 
"logGroups": [ - { - "logGroupName": "my_loggroup", - "creationTime": 1576239692739, - "metricFilterCount": 0, - "arn": "arn:aws:logs:us-east-1:111111111111:log-group:my_loggroup:*", - "storedBytes": 109 - } - ] + "SolutionId": "SO0000", + "SolutionVersion": "1.2.3", + "LogGroup": "my_loggroup", + "region": my_region, } + notyet_response: Dict[str, List[str]] = {"logGroups": []} - BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) + cwl_client = botocore.session.get_session().create_client( + "logs", config=BOTO_CONFIG ) - cwl_client = botocore.session.get_session().create_client('logs', config=BOTO_CONFIG) cwl_stubber = Stubber(cwl_client) - cwl_stubber.add_response( - 'describe_log_groups', - notyet_response - ) - cwl_stubber.add_response( - 'describe_log_groups', - notyet_response - ) - cwl_stubber.add_response( - 'describe_log_groups', - notyet_response - ) + cwl_stubber.add_response("describe_log_groups", notyet_response) + cwl_stubber.add_response("describe_log_groups", notyet_response) + cwl_stubber.add_response("describe_log_groups", notyet_response) cwl_stubber.activate() - mocker.patch('EnableCloudTrailToCloudWatchLogging_waitforloggroup.connect_to_logs', return_value=cwl_client) + mocker.patch( + "EnableCloudTrailToCloudWatchLogging_waitforloggroup.connect_to_logs", + return_value=cwl_client, + ) with pytest.raises(SystemExit) as pytest_wrapped_e: - parsed_event = validation.wait_for_loggroup(event, {}) + validation.wait_for_loggroup(event, {}) assert pytest_wrapped_e.type == SystemExit - assert pytest_wrapped_e.value.code == 'Failed to create Log Group my_loggroup: Timed out' + assert ( + pytest_wrapped_e.value.code + == "Failed to create Log Group my_loggroup: Timed out" + ) cwl_stubber.deactivate() diff --git a/source/remediation_runbooks/scripts/test/test_enablevpcflowlogs.py 
b/source/remediation_runbooks/scripts/test/test_enablevpcflowlogs.py index f50a4bb9..a91f57c8 100644 --- a/source/remediation_runbooks/scripts/test/test_enablevpcflowlogs.py +++ b/source/remediation_runbooks/scripts/test/test_enablevpcflowlogs.py @@ -1,34 +1,37 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 +from typing import Dict, List + import boto3 import botocore.session -from botocore.stub import Stubber -from botocore.config import Config -import pytest -from pytest_mock import mocker - import EnableVPCFlowLogs as validate +import pytest +from botocore.config import Config +from botocore.stub import Stubber my_session = boto3.session.Session() my_region = my_session.region_name + @pytest.fixture(autouse=True) def patch_wait_for_seconds(mocker): - mocker.patch('EnableVPCFlowLogs.wait_for_seconds') + mocker.patch("EnableVPCFlowLogs.wait_for_seconds") + -#===================================================================================== +# ===================================================================================== # EnableVPCFlowLogging_enable_flow_logs SUCCESS -#===================================================================================== +# ===================================================================================== def test_EnableVPCFlowLogs_success(mocker): event = { - 'vpc': 'vpc-123412341234abcde', - 'kms_key_arn': 'arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab', - 'remediation_role': 'remediation-role-name', - 'region': my_region, - 'retries': 1, 'wait': 1 # for testing, so not waiting 60 seconds for a stub + "vpc": "vpc-123412341234abcde", + "kms_key_arn": "arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab", + "remediation_role": "remediation-role-name", + "region": my_region, + "retries": 1, + "wait": 1, # for testing, so not waiting 60 seconds for a stub } - log_group_name = 'VPCFlowLogs/' + event['vpc'] 
+ log_group_name = "VPCFlowLogs/" + event["vpc"] # type: ignore[operator] describe_log_groups_simulated_response = { "logGroups": [ @@ -36,13 +39,13 @@ def test_EnableVPCFlowLogs_success(mocker): "logGroupName": log_group_name, "creationTime": 1614006547370, "metricFilterCount": 0, - "arn": f'arn:aws:logs:us-east-1:111111111111:log-group:{log_group_name}:*', - "storedBytes": 36202 + "arn": f"arn:aws:logs:us-east-1:111111111111:log-group:{log_group_name}:*", + "storedBytes": 36202, } ] } describe_flow_logs_simulated_response = { - 'FlowLogs': [ + "FlowLogs": [ { "CreationTime": "2020-10-27T19:37:52.871000+00:00", "DeliverLogsPermissionArn": f'arn:aws:iam::111111111111:role/{event["remediation_role"]}_{my_region}', @@ -55,100 +58,94 @@ def test_EnableVPCFlowLogs_success(mocker): "LogDestinationType": "cloud-watch-logs", "LogFormat": "${version} ${account-id} ${interface-id} ${srcaddr} ${dstaddr} ${srcport} ${dstport} ${protocol} ${packets} ${bytes} ${start} ${end} ${action} ${log-status}", "Tags": [], - "MaxAggregationInterval": 600 + "MaxAggregationInterval": 600, } ] } - BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region - ) + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) - ### LOGS - logs_client = botocore.session.get_session().create_client('logs', config=BOTO_CONFIG) + # LOGS + logs_client = botocore.session.get_session().create_client( + "logs", config=BOTO_CONFIG + ) logs_stubber = Stubber(logs_client) logs_stubber.add_response( - 'create_log_group', + "create_log_group", {}, { - 'logGroupName': log_group_name, - 'kmsKeyId': 'arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab' - } + "logGroupName": log_group_name, + "kmsKeyId": "arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab", + }, ) logs_stubber.add_response( - 'describe_log_groups', + "describe_log_groups", describe_log_groups_simulated_response, - { - 'logGroupNamePrefix': log_group_name - } + 
{"logGroupNamePrefix": log_group_name}, ) logs_stubber.activate() - ### EC2 - ec2_client = botocore.session.get_session().create_client('ec2', config=BOTO_CONFIG) + # EC2 + ec2_client = botocore.session.get_session().create_client("ec2", config=BOTO_CONFIG) ec2_stubber = Stubber(ec2_client) ec2_stubber.add_response( - 'create_flow_logs', + "create_flow_logs", {}, { - 'DryRun': False, - 'DeliverLogsPermissionArn': event['remediation_role'], - 'LogGroupName': 'VPCFlowLogs/' + event['vpc'], - 'ResourceIds': [event['vpc']], - 'ResourceType': 'VPC', - 'TrafficType': 'REJECT', - 'LogDestinationType': 'cloud-watch-logs' - } + "DryRun": False, + "DeliverLogsPermissionArn": event["remediation_role"], + "LogGroupName": "VPCFlowLogs/" + event["vpc"], # type: ignore[operator] + "ResourceIds": [event["vpc"]], + "ResourceType": "VPC", + "TrafficType": "REJECT", + "LogDestinationType": "cloud-watch-logs", + }, ) ec2_stubber.add_response( - 'describe_flow_logs', + "describe_flow_logs", describe_flow_logs_simulated_response, { - 'DryRun': False, - 'Filters': [ - { - 'Name': 'log-group-name', - 'Values': [ 'VPCFlowLogs/' + event['vpc'] ] - } - ] - } + "DryRun": False, + "Filters": [ + {"Name": "log-group-name", "Values": ["VPCFlowLogs/" + event["vpc"]]} # type: ignore[operator] + ], + }, ) ec2_stubber.activate() - mocker.patch('EnableVPCFlowLogs.connect_to_logs', return_value=logs_client) - mocker.patch('EnableVPCFlowLogs.connect_to_ec2', return_value=ec2_client) + mocker.patch("EnableVPCFlowLogs.connect_to_logs", return_value=logs_client) + mocker.patch("EnableVPCFlowLogs.connect_to_ec2", return_value=ec2_client) assert validate.enable_flow_logs(event, {}) == { "response": { "message": f'VPC Flow Logs enabled for {event["vpc"]} to VPCFlowLogs/{event["vpc"]}', - "status": "Success" + "status": "Success", } } logs_stubber.deactivate() ec2_stubber.deactivate() -#===================================================================================== + +# 
===================================================================================== # EnableVPCFlowLogging_enable_flow_logs loggroup already exists -#===================================================================================== +# ===================================================================================== def test_EnableVPCFlowLogs_loggroup_exists(mocker): event = { - 'vpc': 'vpc-123412341234abcde', - 'remediation_role': 'remediation-role-name', - 'kms_key_arn': 'arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab', - 'region': my_region, - 'retries': 1, 'wait': 1 # for testing, so not waiting 60 seconds for a stub + "vpc": "vpc-123412341234abcde", + "remediation_role": "remediation-role-name", + "kms_key_arn": "arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab", + "region": my_region, + "retries": 1, + "wait": 1, # for testing, so not waiting 60 seconds for a stub } - log_group_name = 'VPCFlowLogs/' + event['vpc'] + log_group_name = "VPCFlowLogs/" + event["vpc"] # type: ignore[operator] describe_log_groups_simulated_response = { "logGroups": [ @@ -156,13 +153,13 @@ def test_EnableVPCFlowLogs_loggroup_exists(mocker): "logGroupName": log_group_name, "creationTime": 1614006547370, "metricFilterCount": 0, - "arn": f'arn:aws:logs:us-east-1:111111111111:log-group:{log_group_name}:*', - "storedBytes": 36202 + "arn": f"arn:aws:logs:us-east-1:111111111111:log-group:{log_group_name}:*", + "storedBytes": 36202, } ] } describe_flow_logs_simulated_response = { - 'FlowLogs': [ + "FlowLogs": [ { "CreationTime": "2020-10-27T19:37:52.871000+00:00", "DeliverLogsPermissionArn": f'arn:aws:iam::111111111111:role/{event["remediation_role"]}_{my_region}', @@ -175,155 +172,143 @@ def test_EnableVPCFlowLogs_loggroup_exists(mocker): "LogDestinationType": "cloud-watch-logs", "LogFormat": "${version} ${account-id} ${interface-id} ${srcaddr} ${dstaddr} ${srcport} ${dstport} ${protocol} ${packets} ${bytes} ${start} ${end} 
${action} ${log-status}", "Tags": [], - "MaxAggregationInterval": 600 + "MaxAggregationInterval": 600, } ] } - BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region - ) + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) - ### LOGS - logs_client = botocore.session.get_session().create_client('logs', config=BOTO_CONFIG) + # LOGS + logs_client = botocore.session.get_session().create_client( + "logs", config=BOTO_CONFIG + ) logs_stubber = Stubber(logs_client) - logs_stubber.add_client_error( - 'create_log_group', - 'ResourceAlreadyExistsException' - ) + logs_stubber.add_client_error("create_log_group", "ResourceAlreadyExistsException") logs_stubber.add_response( - 'describe_log_groups', + "describe_log_groups", describe_log_groups_simulated_response, - { - 'logGroupNamePrefix': log_group_name - } + {"logGroupNamePrefix": log_group_name}, ) logs_stubber.activate() - ### EC2 - ec2_client = botocore.session.get_session().create_client('ec2', config=BOTO_CONFIG) + # EC2 + ec2_client = botocore.session.get_session().create_client("ec2", config=BOTO_CONFIG) ec2_stubber = Stubber(ec2_client) ec2_stubber.add_response( - 'create_flow_logs', + "create_flow_logs", {}, { - 'DryRun': False, - 'DeliverLogsPermissionArn': event['remediation_role'], - 'LogGroupName': 'VPCFlowLogs/' + event['vpc'], - 'ResourceIds': [event['vpc']], - 'ResourceType': 'VPC', - 'TrafficType': 'REJECT', - 'LogDestinationType': 'cloud-watch-logs' - } + "DryRun": False, + "DeliverLogsPermissionArn": event["remediation_role"], + "LogGroupName": "VPCFlowLogs/" + event["vpc"], # type: ignore[operator] + "ResourceIds": [event["vpc"]], + "ResourceType": "VPC", + "TrafficType": "REJECT", + "LogDestinationType": "cloud-watch-logs", + }, ) ec2_stubber.add_response( - 'describe_flow_logs', + "describe_flow_logs", describe_flow_logs_simulated_response, { - 'DryRun': False, - 'Filters': [ - { - 'Name': 'log-group-name', - 'Values': [ 'VPCFlowLogs/' + event['vpc'] ] 
- } - ] - } + "DryRun": False, + "Filters": [ + {"Name": "log-group-name", "Values": ["VPCFlowLogs/" + event["vpc"]]} # type: ignore[operator] + ], + }, ) ec2_stubber.activate() - mocker.patch('EnableVPCFlowLogs.connect_to_logs', return_value=logs_client) - mocker.patch('EnableVPCFlowLogs.connect_to_ec2', return_value=ec2_client) + mocker.patch("EnableVPCFlowLogs.connect_to_logs", return_value=logs_client) + mocker.patch("EnableVPCFlowLogs.connect_to_ec2", return_value=ec2_client) assert validate.enable_flow_logs(event, {}) == { "response": { "message": f'VPC Flow Logs enabled for {event["vpc"]} to VPCFlowLogs/{event["vpc"]}', - "status": "Success" + "status": "Success", } } logs_stubber.deactivate() ec2_stubber.deactivate() -#===================================================================================== + +# ===================================================================================== # EnableVPCFlowLogging_enable_flow_logs FAILED TO CREATE LOGGROUP -#===================================================================================== +# ===================================================================================== def test_EnableVPCFlowLogs_loggroup_fails(mocker): retries = 3 event = { - 'vpc': 'vpc-123412341234abcde', - 'remediation_role': 'remediation-role-name', - 'kms_key_arn': 'arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab', - 'region': my_region, - 'retries': retries, 'wait': 1 # for testing, so not waiting 60 seconds for a stub + "vpc": "vpc-123412341234abcde", + "remediation_role": "remediation-role-name", + "kms_key_arn": "arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab", + "region": my_region, + "retries": retries, + "wait": 1, # for testing, so not waiting 60 seconds for a stub } - log_group_name = 'VPCFlowLogs/' + event['vpc'] + log_group_name = "VPCFlowLogs/" + event["vpc"] # type: ignore[operator] - describe_log_groups_simulated_response = { - "logGroups": [ - ] - } + 
describe_log_groups_simulated_response: Dict[str, List[str]] = {"logGroups": []} - BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region - ) + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) - ### LOGS - logs_client = botocore.session.get_session().create_client('logs', config=BOTO_CONFIG) + # LOGS + logs_client = botocore.session.get_session().create_client( + "logs", config=BOTO_CONFIG + ) logs_stubber = Stubber(logs_client) logs_stubber.add_response( - 'create_log_group', + "create_log_group", {}, { - 'logGroupName': log_group_name, - 'kmsKeyId': 'arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab' - } + "logGroupName": log_group_name, + "kmsKeyId": "arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab", + }, ) for x in range(retries): logs_stubber.add_response( - 'describe_log_groups', + "describe_log_groups", describe_log_groups_simulated_response, - { - 'logGroupNamePrefix': log_group_name - } + {"logGroupNamePrefix": log_group_name}, ) logs_stubber.activate() - mocker.patch('EnableVPCFlowLogs.connect_to_logs', return_value=logs_client) + mocker.patch("EnableVPCFlowLogs.connect_to_logs", return_value=logs_client) with pytest.raises(SystemExit) as pytest_wrapped_e: validate.enable_flow_logs(event, {}) - assert pytest_wrapped_e.value.code == 'Timeout waiting for log group VPCFlowLogs/vpc-123412341234abcde to become active' + assert ( + pytest_wrapped_e.value.code + == "Timeout waiting for log group VPCFlowLogs/vpc-123412341234abcde to become active" + ) logs_stubber.deactivate() -#===================================================================================== + +# ===================================================================================== # EnableVPCFlowLogging_enable_flow_logs FAILED TO ENABLE FLOW LOGS -#===================================================================================== +# 
===================================================================================== def test_EnableVPCFlowLogs_flowlogs_failed(mocker): retries = 3 event = { - 'vpc': 'vpc-123412341234abcde', - 'remediation_role': 'remediation-role-name', - 'kms_key_arn': 'arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab', - 'region': my_region, - 'retries': retries, 'wait': 1 # for testing, so not waiting 60 seconds for a stub + "vpc": "vpc-123412341234abcde", + "remediation_role": "remediation-role-name", + "kms_key_arn": "arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab", + "region": my_region, + "retries": retries, + "wait": 1, # for testing, so not waiting 60 seconds for a stub } - log_group_name = 'VPCFlowLogs/' + event['vpc'] + log_group_name = "VPCFlowLogs/" + event["vpc"] # type: ignore[operator] describe_log_groups_simulated_response = { "logGroups": [ @@ -331,86 +316,82 @@ def test_EnableVPCFlowLogs_flowlogs_failed(mocker): "logGroupName": log_group_name, "creationTime": 1614006547370, "metricFilterCount": 0, - "arn": f'arn:aws:logs:us-east-1:111111111111:log-group:{log_group_name}:*', - "storedBytes": 36202 + "arn": f"arn:aws:logs:us-east-1:111111111111:log-group:{log_group_name}:*", + "storedBytes": 36202, } ] } - describe_flow_logs_simulated_response = { - 'FlowLogs': [] - } + describe_flow_logs_simulated_response: Dict[str, List[str]] = {"FlowLogs": []} - BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region - ) + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) - ### LOGS - logs_client = botocore.session.get_session().create_client('logs', config=BOTO_CONFIG) + # LOGS + logs_client = botocore.session.get_session().create_client( + "logs", config=BOTO_CONFIG + ) logs_stubber = Stubber(logs_client) logs_stubber.add_response( - 'create_log_group', + "create_log_group", {}, { - 'logGroupName': log_group_name, - 'kmsKeyId': 
'arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab' - } + "logGroupName": log_group_name, + "kmsKeyId": "arn:aws:kms:us-west-2:111111111111:key/1234abcd-12ab-34cd-56ef-1234567890ab", + }, ) logs_stubber.add_response( - 'describe_log_groups', + "describe_log_groups", describe_log_groups_simulated_response, - { - 'logGroupNamePrefix': log_group_name - } + {"logGroupNamePrefix": log_group_name}, ) logs_stubber.activate() - ### EC2 - ec2_client = botocore.session.get_session().create_client('ec2', config=BOTO_CONFIG) + # EC2 + ec2_client = botocore.session.get_session().create_client("ec2", config=BOTO_CONFIG) ec2_stubber = Stubber(ec2_client) ec2_stubber.add_response( - 'create_flow_logs', + "create_flow_logs", {}, { - 'DryRun': False, - 'DeliverLogsPermissionArn': event['remediation_role'], - 'LogGroupName': 'VPCFlowLogs/' + event['vpc'], - 'ResourceIds': [event['vpc']], - 'ResourceType': 'VPC', - 'TrafficType': 'REJECT', - 'LogDestinationType': 'cloud-watch-logs' - } + "DryRun": False, + "DeliverLogsPermissionArn": event["remediation_role"], + "LogGroupName": "VPCFlowLogs/" + event["vpc"], # type: ignore[operator] + "ResourceIds": [event["vpc"]], + "ResourceType": "VPC", + "TrafficType": "REJECT", + "LogDestinationType": "cloud-watch-logs", + }, ) for x in range(retries): ec2_stubber.add_response( - 'describe_flow_logs', + "describe_flow_logs", describe_flow_logs_simulated_response, { - 'DryRun': False, - 'Filters': [ + "DryRun": False, + "Filters": [ { - 'Name': 'log-group-name', - 'Values': [ 'VPCFlowLogs/' + event['vpc'] ] + "Name": "log-group-name", + "Values": ["VPCFlowLogs/" + event["vpc"]], # type: ignore[operator] } - ] - } + ], + }, ) ec2_stubber.activate() - mocker.patch('EnableVPCFlowLogs.connect_to_logs', return_value=logs_client) - mocker.patch('EnableVPCFlowLogs.connect_to_ec2', return_value=ec2_client) + mocker.patch("EnableVPCFlowLogs.connect_to_logs", return_value=logs_client) + 
mocker.patch("EnableVPCFlowLogs.connect_to_ec2", return_value=ec2_client) with pytest.raises(SystemExit) as pytest_wrapped_e: validate.enable_flow_logs(event, {}) - assert pytest_wrapped_e.value.code == 'Timeout waiting for flowlogs to log group VPCFlowLogs/vpc-123412341234abcde to become active' + assert ( + pytest_wrapped_e.value.code + == "Timeout waiting for flowlogs to log group VPCFlowLogs/vpc-123412341234abcde to become active" + ) logs_stubber.deactivate() ec2_stubber.deactivate() diff --git a/source/remediation_runbooks/scripts/test/test_makeebssnapshotsprivate.py b/source/remediation_runbooks/scripts/test/test_makeebssnapshotsprivate.py index 856d17b6..a52ae421 100644 --- a/source/remediation_runbooks/scripts/test/test_makeebssnapshotsprivate.py +++ b/source/remediation_runbooks/scripts/test/test_makeebssnapshotsprivate.py @@ -1,25 +1,19 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 +from typing import Any, Dict + import boto3 -import json import botocore.session -from botocore.stub import Stubber, ANY -from botocore.config import Config -import pytest -from pytest_mock import mocker - import GetPublicEBSSnapshots as getsnaps import MakeEBSSnapshotsPrivate as updatesnaps +from botocore.config import Config +from botocore.stub import Stubber my_session = boto3.session.Session() my_region = my_session.region_name -BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region -) +BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) + def snaplist(): return { @@ -35,19 +29,10 @@ def snaplist(): "VolumeId": "vol-12341234123412345", "VolumeSize": 4, "Tags": [ - { - "Key": "SnapshotDate", - "Value": "2021-03-11 08:23:02.376859" - }, - { - "Key": "DeleteEBSVolOnCompletion", - "Value": "False" - }, - { - "Key": "SnapshotReason", - "Value": "Idle Volume" - } - ] + {"Key": "SnapshotDate", "Value": "2021-03-11 08:23:02.376859"}, + {"Key": 
"DeleteEBSVolOnCompletion", "Value": "False"}, + {"Key": "SnapshotReason", "Value": "Idle Volume"}, + ], }, { "Description": "Snapshot of idle volume before deletion", @@ -60,19 +45,10 @@ def snaplist(): "VolumeId": "vol-12341234123412345", "VolumeSize": 4, "Tags": [ - { - "Key": "DeleteEBSVolOnCompletion", - "Value": "False" - }, - { - "Key": "SnapshotDate", - "Value": "2021-03-11 08:20:37.224101" - }, - { - "Key": "SnapshotReason", - "Value": "Idle Volume" - } - ] + {"Key": "DeleteEBSVolOnCompletion", "Value": "False"}, + {"Key": "SnapshotDate", "Value": "2021-03-11 08:20:37.224101"}, + {"Key": "SnapshotReason", "Value": "Idle Volume"}, + ], }, { "Description": "Snapshot of idle volume before deletion", @@ -85,19 +61,10 @@ def snaplist(): "VolumeId": "vol-12341234123412345", "VolumeSize": 4, "Tags": [ - { - "Key": "SnapshotReason", - "Value": "Idle Volume" - }, - { - "Key": "SnapshotDate", - "Value": "2021-03-11 08:22:48.714893" - }, - { - "Key": "DeleteEBSVolOnCompletion", - "Value": "False" - } - ] + {"Key": "SnapshotReason", "Value": "Idle Volume"}, + {"Key": "SnapshotDate", "Value": "2021-03-11 08:22:48.714893"}, + {"Key": "DeleteEBSVolOnCompletion", "Value": "False"}, + ], }, { "Description": "Snapshot of idle volume before deletion", @@ -110,19 +77,10 @@ def snaplist(): "VolumeId": "vol-12341234123412345", "VolumeSize": 4, "Tags": [ - { - "Key": "DeleteEBSVolOnCompletion", - "Value": "False" - }, - { - "Key": "SnapshotReason", - "Value": "Idle Volume" - }, - { - "Key": "SnapshotDate", - "Value": "2021-03-11 08:23:04.876640" - } - ] + {"Key": "DeleteEBSVolOnCompletion", "Value": "False"}, + {"Key": "SnapshotReason", "Value": "Idle Volume"}, + {"Key": "SnapshotDate", "Value": "2021-03-11 08:23:04.876640"}, + ], }, { "Description": "Snapshot of idle volume before deletion", @@ -135,112 +93,98 @@ def snaplist(): "VolumeId": "vol-12341234123412345", "VolumeSize": 4, "Tags": [ - { - "Key": "DeleteEBSVolOnCompletion", - "Value": "False" - }, - { - "Key": 
"SnapshotReason", - "Value": "Idle Volume" - }, - { - "Key": "SnapshotDate", - "Value": "2021-03-11 08:22:34.671355" - } - ] - } + {"Key": "DeleteEBSVolOnCompletion", "Value": "False"}, + {"Key": "SnapshotReason", "Value": "Idle Volume"}, + {"Key": "SnapshotDate", "Value": "2021-03-11 08:22:34.671355"}, + ], + }, ] } + snapids = [ "snap-12341234123412345", "snap-12341234123412345", "snap-12341234123412345", "snap-12341234123412345", - "snap-12341234123412345" + "snap-12341234123412345", ] + def test_get_snaps(mocker): event = { - 'account_id': '111111111111', + "account_id": "111111111111", } - ec2 = botocore.session.get_session().create_client('ec2', config=BOTO_CONFIG) + ec2 = botocore.session.get_session().create_client("ec2", config=BOTO_CONFIG) ec2_stubber = Stubber(ec2) snaps = snaplist() - snaps['NextToken'] = '1234567890' + snaps["NextToken"] = "1234567890" ec2_stubber.add_response( - 'describe_snapshots', + "describe_snapshots", snaps, { - 'MaxResults': 100, - 'OwnerIds': [ event['account_id'] ], - 'RestorableByUserIds': [ 'all' ] - } + "MaxResults": 100, + "OwnerIds": [event["account_id"]], + "RestorableByUserIds": ["all"], + }, ) ec2_stubber.add_response( - 'describe_snapshots', + "describe_snapshots", snaplist(), { - 'MaxResults': 100, - 'OwnerIds': [ event['account_id'] ], - 'RestorableByUserIds': [ 'all' ], - 'NextToken': '1234567890' - } + "MaxResults": 100, + "OwnerIds": [event["account_id"]], + "RestorableByUserIds": ["all"], + "NextToken": "1234567890", + }, ) ec2_stubber.activate() - mocker.patch('GetPublicEBSSnapshots.connect_to_ec2', return_value=ec2) + mocker.patch("GetPublicEBSSnapshots.connect_to_ec2", return_value=ec2) assert getsnaps.get_public_snapshots(event, {}) == snapids + snapids ec2_stubber.assert_no_pending_responses() ec2_stubber.deactivate() + def test_get_snaps_testmode(mocker): - event = { - 'account_id': '111111111111', - 'testmode': True - } + event = {"account_id": "111111111111", "testmode": True} assert 
getsnaps.get_public_snapshots(event, {}) == snapids + def test_make_snaps_private(mocker): - event = { - 'account_id': '111111111111', + event: Dict[str, Any] = { + "account_id": "111111111111", } - - ec2 = botocore.session.get_session().create_client('ec2', config=BOTO_CONFIG) + ec2 = botocore.session.get_session().create_client("ec2", config=BOTO_CONFIG) ec2_stubber = Stubber(ec2) - event['snapshots'] = snapids + event["snapshots"] = snapids for snaps in range(0, len(snapids)): ec2_stubber.add_response( - 'modify_snapshot_attribute', + "modify_snapshot_attribute", {}, { - 'Attribute': 'CreateVolumePermission', - 'CreateVolumePermission': { - 'Remove': [{'Group': 'all'}] - }, - 'SnapshotId': snapids[snaps] - } + "Attribute": "CreateVolumePermission", + "CreateVolumePermission": {"Remove": [{"Group": "all"}]}, + "SnapshotId": snapids[snaps], + }, ) - ec2_stubber.add_response( - 'describe_snapshots', - {} - ) + ec2_stubber.add_response("describe_snapshots", {}) ec2_stubber.activate() - mocker.patch('MakeEBSSnapshotsPrivate.connect_to_ec2', return_value=ec2) + mocker.patch("MakeEBSSnapshotsPrivate.connect_to_ec2", return_value=ec2) assert updatesnaps.make_snapshots_private(event, {}) == { - "response": { - "message": "5 of 5 Snapshot permissions set to private", - "status": "Success" - } + "response": { + "message": "5 of 5 Snapshot permissions set to private", + "status": "Success", } + } ec2_stubber.assert_no_pending_responses() ec2_stubber.deactivate() diff --git a/source/remediation_runbooks/scripts/test/test_makerdssnapshotprivate.py b/source/remediation_runbooks/scripts/test/test_makerdssnapshotprivate.py index cf4442df..33c58e75 100644 --- a/source/remediation_runbooks/scripts/test/test_makerdssnapshotprivate.py +++ b/source/remediation_runbooks/scripts/test/test_makerdssnapshotprivate.py @@ -1,69 +1,53 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import boto3 -import json import botocore.session -from botocore.stub import Stubber, ANY -from botocore.config import Config -import pytest -from pytest_mock import mocker - import MakeRDSSnapshotPrivate as remediate +from botocore.config import Config +from botocore.stub import Stubber my_session = boto3.session.Session() my_region = my_session.region_name -BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region -) +BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) -db_snap_event = { - 'DBSnapshotId': 'snap-111111111111', - 'DBSnapshotType': 'snapshot' -} +db_snap_event = {"DBSnapshotId": "snap-111111111111", "DBSnapshotType": "snapshot"} cluster_snap_event = { - 'DBSnapshotId': 'snap-111111111111', - 'DBSnapshotType': 'cluster-snapshot' + "DBSnapshotId": "snap-111111111111", + "DBSnapshotType": "cluster-snapshot", } + def test_make_clustersnap_private(mocker): event = cluster_snap_event - rds = botocore.session.get_session().create_client('rds', config=BOTO_CONFIG) + rds = botocore.session.get_session().create_client("rds", config=BOTO_CONFIG) rds_stubber = Stubber(rds) - rds_stubber.add_response( - 'modify_db_cluster_snapshot_attribute', - {} - ) + rds_stubber.add_response("modify_db_cluster_snapshot_attribute", {}) rds_stubber.activate() - mocker.patch('MakeRDSSnapshotPrivate.connect_to_rds', return_value=rds) + mocker.patch("MakeRDSSnapshotPrivate.connect_to_rds", return_value=rds) assert remediate.make_snapshot_private(event, {}) == { - "response": { - "message": "Snapshot snap-111111111111 permissions set to private", - "status": "Success" - } + "response": { + "message": "Snapshot snap-111111111111 permissions set to private", + "status": "Success", } + } rds_stubber.assert_no_pending_responses() rds_stubber.deactivate() + def test_make_db_private(mocker): event = db_snap_event - rds = botocore.session.get_session().create_client('rds', config=BOTO_CONFIG) + rds = 
botocore.session.get_session().create_client("rds", config=BOTO_CONFIG) rds_stubber = Stubber(rds) - rds_stubber.add_response( - 'modify_db_snapshot_attribute', - {} - ) + rds_stubber.add_response("modify_db_snapshot_attribute", {}) rds_stubber.activate() - mocker.patch('MakeRDSSnapshotPrivate.connect_to_rds', return_value=rds) + mocker.patch("MakeRDSSnapshotPrivate.connect_to_rds", return_value=rds) assert remediate.make_snapshot_private(event, {}) == { - "response": { - "message": "Snapshot snap-111111111111 permissions set to private", - "status": "Success" - } + "response": { + "message": "Snapshot snap-111111111111 permissions set to private", + "status": "Success", } + } rds_stubber.assert_no_pending_responses() rds_stubber.deactivate() diff --git a/source/remediation_runbooks/scripts/test/test_puts3bucketpolicydeny.py b/source/remediation_runbooks/scripts/test/test_puts3bucketpolicydeny.py index c7e4af29..2f1180c5 100644 --- a/source/remediation_runbooks/scripts/test/test_puts3bucketpolicydeny.py +++ b/source/remediation_runbooks/scripts/test/test_puts3bucketpolicydeny.py @@ -1,28 +1,24 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import json + import boto3 import botocore.session -from botocore.stub import Stubber -from botocore.config import Config -import pytest -from pytest_mock import mocker - import PutS3BucketPolicyDeny as remediation +import pytest +from botocore.config import Config +from botocore.stub import Stubber my_session = boto3.session.Session() my_region = my_session.region_name -BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region -) +BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) + @pytest.fixture(autouse=True) def mock_get_partition(mocker): - mocker.patch('PutS3BucketPolicyDeny.get_partition', return_value='aws') + mocker.patch("PutS3BucketPolicyDeny.get_partition", return_value="aws") + def policy_basic_existing(): return { @@ -32,87 +28,66 @@ def policy_basic_existing(): { "Sid": "S3ReplicationPolicyStmt1", "Effect": "Allow", - "Principal": { - "AWS": "arn:aws:iam::111122223333:root" - }, + "Principal": {"AWS": "arn:aws:iam::111122223333:root"}, "Action": [ "s3:GetBucketVersioning", "s3:PutBucketVersioning", "s3:ReplicateObject", - "s3:ReplicateDelete" + "s3:ReplicateDelete", ], - "Resource": [ - "arn:aws:s3:::abucket", - "arn:aws:s3:::abucket/*" - ] + "Resource": ["arn:aws:s3:::abucket", "arn:aws:s3:::abucket/*"], }, { "Effect": "Allow", - "Principal": { - "AWS": "arn:aws:iam::111122223333:root" - }, + "Principal": {"AWS": "arn:aws:iam::111122223333:root"}, "Action": "s3:*", - "Resource": [ - "arn:aws:s3:::example", - "arn:aws:s3:::example/*" - ] - } - ] + "Resource": ["arn:aws:s3:::example", "arn:aws:s3:::example/*"], + }, + ], } + + def policy_basic_expected(): - return { + return { "Version": "2008-10-17", "Id": "MyBucketPolicy", "Statement": [ { "Sid": "S3ReplicationPolicyStmt1", "Effect": "Allow", - "Principal": { - "AWS": "arn:aws:iam::111122223333:root" - }, + "Principal": {"AWS": "arn:aws:iam::111122223333:root"}, "Action": [ "s3:GetBucketVersioning", 
"s3:PutBucketVersioning", "s3:ReplicateObject", - "s3:ReplicateDelete" + "s3:ReplicateDelete", ], - "Resource": [ - "arn:aws:s3:::abucket", - "arn:aws:s3:::abucket/*" - ] + "Resource": ["arn:aws:s3:::abucket", "arn:aws:s3:::abucket/*"], }, { "Effect": "Allow", - "Principal": { - "AWS": "arn:aws:iam::111122223333:root" - }, + "Principal": {"AWS": "arn:aws:iam::111122223333:root"}, "Action": "s3:*", - "Resource": [ - "arn:aws:s3:::example", - "arn:aws:s3:::example/*" - ] + "Resource": ["arn:aws:s3:::example", "arn:aws:s3:::example/*"], }, { "Effect": "Deny", "Principal": { - "AWS": [ "arn:aws:iam::111122223333:root" ], - + "AWS": ["arn:aws:iam::111122223333:root"], }, "Action": [ "s3:DeleteBucketPolicy", "s3:PutBucketAcl", "s3:PutBucketPolicy", "s3:PutObjectAcl", - "s3:PutEncryptionConfiguration" + "s3:PutEncryptionConfiguration", ], - "Resource": [ - "arn:aws:s3:::example", - "arn:aws:s3:::example/*" - ] - } - ] + "Resource": ["arn:aws:s3:::example", "arn:aws:s3:::example/*"], + }, + ], } + def policy_multi_principal_existing(): return { "Version": "2008-10-17", @@ -125,44 +100,36 @@ def policy_multi_principal_existing(): "AWS": [ "arn:aws:iam::111122223333:root", "arn:aws:iam::111122223333:user/Dave", - "arn:aws:iam::222233334444:user/Lalit" + "arn:aws:iam::222233334444:user/Lalit", ] }, "Action": [ "s3:GetBucketVersioning", "s3:PutBucketVersioning", "s3:ReplicateObject", - "s3:ReplicateDelete" + "s3:ReplicateDelete", ], - "Resource": [ - "arn:aws:s3:::abucket", - "arn:aws:s3:::abucket/*" - ] + "Resource": ["arn:aws:s3:::abucket", "arn:aws:s3:::abucket/*"], }, { "Effect": "Allow", "Principal": { "AWS": "arn:aws:iam::111122223333:root", - "Service": "ssm.amazonaws.com" + "Service": "ssm.amazonaws.com", }, "Action": "s3:*", - "Resource": [ - "arn:aws:s3:::example", - "arn:aws:s3:::example/*" - ] + "Resource": ["arn:aws:s3:::example", "arn:aws:s3:::example/*"], }, { "Effect": "Allow", "Principal": "*", "Action": "s3:*", - "Resource": [ - "arn:aws:s3:::example", - 
"arn:aws:s3:::example/*" - ] - } - ] + "Resource": ["arn:aws:s3:::example", "arn:aws:s3:::example/*"], + }, + ], } + def policy_multi_principal_expected(): return { "Version": "2008-10-17", @@ -175,48 +142,38 @@ def policy_multi_principal_expected(): "AWS": [ "arn:aws:iam::111122223333:root", "arn:aws:iam::111122223333:user/Dave", - "arn:aws:iam::222233334444:user/Lalit" + "arn:aws:iam::222233334444:user/Lalit", ] }, "Action": [ "s3:GetBucketVersioning", "s3:PutBucketVersioning", "s3:ReplicateObject", - "s3:ReplicateDelete" + "s3:ReplicateDelete", ], - "Resource": [ - "arn:aws:s3:::abucket", - "arn:aws:s3:::abucket/*" - ] + "Resource": ["arn:aws:s3:::abucket", "arn:aws:s3:::abucket/*"], }, { "Effect": "Allow", "Principal": { "AWS": "arn:aws:iam::111122223333:root", - "Service": "ssm.amazonaws.com" + "Service": "ssm.amazonaws.com", }, "Action": "s3:*", - "Resource": [ - "arn:aws:s3:::example", - "arn:aws:s3:::example/*" - ] + "Resource": ["arn:aws:s3:::example", "arn:aws:s3:::example/*"], }, { "Effect": "Allow", "Principal": "*", "Action": "s3:*", - "Resource": [ - "arn:aws:s3:::example", - "arn:aws:s3:::example/*" - ] + "Resource": ["arn:aws:s3:::example", "arn:aws:s3:::example/*"], }, { "Effect": "Deny", "Principal": { - "AWS": - [ + "AWS": [ "arn:aws:iam::111122223333:user/Dave", - "arn:aws:iam::111122223333:root" + "arn:aws:iam::111122223333:root", ] }, "Action": [ @@ -224,16 +181,14 @@ def policy_multi_principal_expected(): "s3:PutBucketAcl", "s3:PutBucketPolicy", "s3:PutObjectAcl", - "s3:PutEncryptionConfiguration" + "s3:PutEncryptionConfiguration", ], - "Resource": [ - "arn:aws:s3:::example", - "arn:aws:s3:::example/*" - ] - } - ] + "Resource": ["arn:aws:s3:::example", "arn:aws:s3:::example/*"], + }, + ], } + def policy_statement_no_aws_principals(): return { "Version": "2012-10-17", @@ -241,38 +196,31 @@ def policy_statement_no_aws_principals(): { "Sid": "AWSCloudTrailAclCheck20150319", "Effect": "Allow", - "Principal": { - "Service": 
"cloudtrail.amazonaws.com" - }, + "Principal": {"Service": "cloudtrail.amazonaws.com"}, "Action": "s3:GetBucketAcl", - "Resource": "arn:aws:s3:::aws-cloudtrail-logs-222233334444-d425bf6a" + "Resource": "arn:aws:s3:::aws-cloudtrail-logs-222233334444-d425bf6a", }, { "Sid": "AWSCloudTrailWrite20150319", "Effect": "Allow", - "Principal": { - "Service": "cloudtrail.amazonaws.com" - }, + "Principal": {"Service": "cloudtrail.amazonaws.com"}, "Action": "s3:PutObject", "Resource": "arn:aws:s3:::aws-cloudtrail-logs-222233334444-d425bf6a/AWSLogs/222233334444/*", "Condition": { - "StringEquals": { - "s3:x-amz-acl": "bucket-owner-full-control" - } - } + "StringEquals": {"s3:x-amz-acl": "bucket-owner-full-control"} + }, }, { "Sid": "ExternalAccount", "Effect": "Allow", - "Principal": { - "AWS": "arn:aws:iam::111122223333:user/test" - }, + "Principal": {"AWS": "arn:aws:iam::111122223333:user/test"}, "Action": "s3:PutObjectAcl", - "Resource": "arn:aws:s3:::aws-cloudtrail-logs-222233334444-d425bf6a/*" - } - ] + "Resource": "arn:aws:s3:::aws-cloudtrail-logs-222233334444-d425bf6a/*", + }, + ], } + def policy_statement_no_aws_principals_expected(): return { "Version": "2012-10-17", @@ -280,145 +228,169 @@ def policy_statement_no_aws_principals_expected(): { "Sid": "AWSCloudTrailAclCheck20150319", "Effect": "Allow", - "Principal": { - "Service": "cloudtrail.amazonaws.com" - }, + "Principal": {"Service": "cloudtrail.amazonaws.com"}, "Action": "s3:GetBucketAcl", - "Resource": "arn:aws:s3:::aws-cloudtrail-logs-222233334444-d425bf6a" + "Resource": "arn:aws:s3:::aws-cloudtrail-logs-222233334444-d425bf6a", }, { "Sid": "AWSCloudTrailWrite20150319", "Effect": "Allow", - "Principal": { - "Service": "cloudtrail.amazonaws.com" - }, + "Principal": {"Service": "cloudtrail.amazonaws.com"}, "Action": "s3:PutObject", "Resource": "arn:aws:s3:::aws-cloudtrail-logs-222233334444-d425bf6a/AWSLogs/222233334444/*", "Condition": { - "StringEquals": { - "s3:x-amz-acl": "bucket-owner-full-control" - } - } + 
"StringEquals": {"s3:x-amz-acl": "bucket-owner-full-control"} + }, }, { "Sid": "ExternalAccount", "Effect": "Allow", - "Principal": { - "AWS": "arn:aws:iam::111122223333:user/test" - }, + "Principal": {"AWS": "arn:aws:iam::111122223333:user/test"}, "Action": "s3:PutObjectAcl", - "Resource": "arn:aws:s3:::aws-cloudtrail-logs-222233334444-d425bf6a/*" + "Resource": "arn:aws:s3:::aws-cloudtrail-logs-222233334444-d425bf6a/*", }, { "Effect": "Deny", - "Principal": { - "AWS": - [ - "arn:aws:iam::111122223333:user/test" - ] - }, + "Principal": {"AWS": ["arn:aws:iam::111122223333:user/test"]}, "Action": [ "s3:DeleteBucketPolicy", "s3:PutBucketAcl", "s3:PutBucketPolicy", "s3:PutObjectAcl", - "s3:PutEncryptionConfiguration" + "s3:PutEncryptionConfiguration", ], "Resource": [ "arn:aws:s3:::aws-cloudtrail-logs-222233334444-d425bf6a", - "arn:aws:s3:::aws-cloudtrail-logs-222233334444-d425bf6a/*" - ] - } - ] + "arn:aws:s3:::aws-cloudtrail-logs-222233334444-d425bf6a/*", + ], + }, + ], + } + + +def policy_only_star_principals(): + return { + "Version": "2008-10-17", + "Id": "MyBucketPolicy", + "Statement": [ + { + "Sid": "S3ReplicationPolicyStmt1", + "Effect": "Allow", + "Principal": "*", + "Action": [ + "s3:DeleteBucketPolicy", + "s3:PutBucketAcl", + ], + "Resource": ["arn:aws:s3:::abucket", "arn:aws:s3:::abucket/*"], + }, + { + "Effect": "Allow", + "Principal": "*", + "Action": "s3:*", + "Resource": ["arn:aws:s3:::example", "arn:aws:s3:::example/*"], + }, + ], } + def event(): return { - 'bucket': 'example', - 'accountid': '222233334444', - 'denylist': 's3:DeleteBucketPolicy,s3:PutBucketAcl,s3:PutBucketPolicy,s3:PutObjectAcl,s3:PutEncryptionConfiguration' + "bucket": "example", + "accountid": "222233334444", + "denylist": "s3:DeleteBucketPolicy,s3:PutBucketAcl,s3:PutBucketPolicy,s3:PutObjectAcl,s3:PutEncryptionConfiguration", } + def test_new_policy(mocker): - s3_client = botocore.session.get_session().create_client('s3', config=BOTO_CONFIG) + s3_client = 
botocore.session.get_session().create_client("s3", config=BOTO_CONFIG) s3_stubber = Stubber(s3_client) s3_stubber.add_response( - 'get_bucket_policy', - { - "Policy": json.dumps(policy_basic_existing()) - }, - expected_params={ - 'Bucket': 'example', - 'ExpectedBucketOwner': '222233334444' - } + "get_bucket_policy", + {"Policy": json.dumps(policy_basic_existing())}, + expected_params={"Bucket": "example", "ExpectedBucketOwner": "222233334444"}, ) s3_stubber.add_response( - 'put_bucket_policy', + "put_bucket_policy", {}, expected_params={ - 'Bucket': 'example', - 'ExpectedBucketOwner': '222233334444', - 'Policy': json.dumps(policy_basic_expected()) - } + "Bucket": "example", + "ExpectedBucketOwner": "222233334444", + "Policy": json.dumps(policy_basic_expected()), + }, ) s3_stubber.activate() - mocker.patch('PutS3BucketPolicyDeny.connect_to_s3', return_value=s3_client) - assert remediation.update_bucket_policy(event(), {}) == None + mocker.patch("PutS3BucketPolicyDeny.connect_to_s3", return_value=s3_client) + assert remediation.update_bucket_policy(event(), {}) is None s3_stubber.deactivate() + def test_new_policy_multiple(mocker): - s3_client = botocore.session.get_session().create_client('s3', config=BOTO_CONFIG) + s3_client = botocore.session.get_session().create_client("s3", config=BOTO_CONFIG) s3_stubber = Stubber(s3_client) s3_stubber.add_response( - 'get_bucket_policy', - { - "Policy": json.dumps(policy_multi_principal_existing()) - }, - expected_params={ - 'Bucket': 'example', - 'ExpectedBucketOwner': '222233334444' - } + "get_bucket_policy", + {"Policy": json.dumps(policy_multi_principal_existing())}, + expected_params={"Bucket": "example", "ExpectedBucketOwner": "222233334444"}, ) s3_stubber.add_response( - 'put_bucket_policy', + "put_bucket_policy", {}, expected_params={ - 'Bucket': 'example', - 'ExpectedBucketOwner': '222233334444', - 'Policy': json.dumps(policy_multi_principal_expected()) - } + "Bucket": "example", + "ExpectedBucketOwner": 
"222233334444", + "Policy": json.dumps(policy_multi_principal_expected()), + }, ) s3_stubber.activate() - mocker.patch('PutS3BucketPolicyDeny.connect_to_s3', return_value=s3_client) - assert remediation.update_bucket_policy(event(), {}) == None + mocker.patch("PutS3BucketPolicyDeny.connect_to_s3", return_value=s3_client) + assert remediation.update_bucket_policy(event(), {}) is None s3_stubber.deactivate() + def test_policy_statement_no_aws_principals(mocker): - s3_client = botocore.session.get_session().create_client('s3', config=BOTO_CONFIG) + s3_client = botocore.session.get_session().create_client("s3", config=BOTO_CONFIG) s3_stubber = Stubber(s3_client) - bucket_name = 'aws-cloudtrail-logs-222233334444-d425bf6a' + bucket_name = "aws-cloudtrail-logs-222233334444-d425bf6a" s3_stubber.add_response( - 'get_bucket_policy', - { - "Policy": json.dumps(policy_statement_no_aws_principals()) - }, - expected_params={ - 'Bucket': bucket_name, - 'ExpectedBucketOwner': '222233334444' - } + "get_bucket_policy", + {"Policy": json.dumps(policy_statement_no_aws_principals())}, + expected_params={"Bucket": bucket_name, "ExpectedBucketOwner": "222233334444"}, ) s3_stubber.add_response( - 'put_bucket_policy', + "put_bucket_policy", {}, expected_params={ - 'Bucket': bucket_name, - 'ExpectedBucketOwner': '222233334444', - 'Policy': json.dumps(policy_statement_no_aws_principals_expected()) - } + "Bucket": bucket_name, + "ExpectedBucketOwner": "222233334444", + "Policy": json.dumps(policy_statement_no_aws_principals_expected()), + }, ) s3_stubber.activate() - mocker.patch('PutS3BucketPolicyDeny.connect_to_s3', return_value=s3_client) + mocker.patch("PutS3BucketPolicyDeny.connect_to_s3", return_value=s3_client) this_event = event() - this_event['bucket'] = bucket_name - assert remediation.update_bucket_policy(this_event, {}) == None + this_event["bucket"] = bucket_name + assert remediation.update_bucket_policy(this_event, {}) is None + s3_stubber.deactivate() + + +def 
test_policy_statement_only_star_principal(mocker): + s3_client = botocore.session.get_session().create_client("s3", config=BOTO_CONFIG) + s3_stubber = Stubber(s3_client) + s3_stubber.add_response( + "get_bucket_policy", + {"Policy": json.dumps(policy_only_star_principals())}, + expected_params={"Bucket": "example", "ExpectedBucketOwner": "222233334444"}, + ) + s3_stubber.activate() + mocker.patch("PutS3BucketPolicyDeny.connect_to_s3", return_value=s3_client) + + with pytest.raises(SystemExit) as pytest_wrapped_e: + remediation.update_bucket_policy(event(), {}) + bucket_name = event().get("bucket") + assert pytest_wrapped_e.type == SystemExit + assert ( + pytest_wrapped_e.value.code + == f"Unable to create an explicit deny statement for {bucket_name}" + ) + s3_stubber.deactivate() diff --git a/source/remediation_runbooks/scripts/test/test_remove_codebuild_privileged_mode.py b/source/remediation_runbooks/scripts/test/test_remove_codebuild_privileged_mode.py new file mode 100644 index 00000000..f397e215 --- /dev/null +++ b/source/remediation_runbooks/scripts/test/test_remove_codebuild_privileged_mode.py @@ -0,0 +1,147 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Test the functionality of the `remove_codebuild_privileged_mode` remediation script""" + +from datetime import datetime +from unittest.mock import patch + +import boto3 +from botocore.config import Config +from botocore.stub import Stubber +from remove_codebuild_privileged_mode import lambda_handler + + +def test_remove_codebuild_privileged_mode(mocker): + BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + codebuild = boto3.client("codebuild", config=BOTO_CONFIG) + stub_codebuild = Stubber(codebuild) + clients = {"codebuild": codebuild} + + project_name = "TestProject" + + stub_codebuild.add_response( + "batch_get_projects", describedCodeBuildProject, {"names": [project_name]} + ) + + stub_codebuild.add_response( + "update_project", + {}, + {"name": project_name, "environment": edited_environment}, + ) + + describedCodeBuildProject["projects"][0]["environment"] = edited_environment + + stub_codebuild.add_response( + "batch_get_projects", describedCodeBuildProject, {"names": [project_name]} + ) + + stub_codebuild.activate() + + with patch("boto3.client", side_effect=lambda service, **_: clients[service]): + event = {"project_name": project_name} + response = lambda_handler(event, {}) + assert response == {"privilegedMode": edited_environment["privilegedMode"]} + + +describedCodeBuildProject = { + "projects": [ + { + "name": "TestProject", + "arn": "arn", + "description": "description", + "source": {"type": "BITBUCKET"}, + "secondarySources": [{"type": "BITBUCKET"}], + "sourceVersion": "test", + "secondarySourceVersions": [{"sourceIdentifier": "", "sourceVersion": ""}], + "artifacts": {"type": "CODEPIPELINE"}, + "secondaryArtifacts": [{"type": "CODEPIPELINE"}], + "cache": {"type": "LOCAL"}, + "environment": { + "type": "WINDOWS_CONTAINER", + "image": "test", + "computeType": "BUILD_GENERAL1_2XLARGE", + "environmentVariables": [], + "privilegedMode": True, + "certificate": "test", + "registryCredential": 
{ + "credential": "test", + "credentialProvider": "SECRETS_MANAGER", + }, + "imagePullCredentialsType": "CODEBUILD", + }, + "serviceRole": "test", + "timeoutInMinutes": 5, + "queuedTimeoutInMinutes": 5, + "encryptionKey": "test", + "tags": [{"key": "test", "value": "test"}], + "created": datetime(2015, 1, 1), + "lastModified": datetime(2015, 1, 1), + "webhook": { + "url": "test", + "payloadUrl": "test", + "secret": "test", + "branchFilter": "test", + "filterGroups": [], + "buildType": "BUILD", + "lastModifiedSecret": datetime(2015, 1, 1), + }, + "vpcConfig": { + "vpcId": "test", + "subnets": ["test"], + "securityGroupIds": ["test"], + }, + "badge": {"badgeEnabled": False, "badgeRequestUrl": ""}, + "logsConfig": { + "cloudWatchLogs": {"status": "ENABLED"}, + "s3Logs": {"status": "DISABLED"}, + }, + "fileSystemLocations": [ + { + "identifier": "", + "mountOptions": "", + "mountPoint": "", + "location": "", + "type": "EFS", + } + ], + "buildBatchConfig": { + "serviceRole": "test", + "combineArtifacts": False, + "timeoutInMins": 1, + "restrictions": { + "maximumBuildsAllowed": 1, + "computeTypesAllowed": [ + "string", + ], + }, + "batchReportMode": "REPORT_INDIVIDUAL_BUILDS", + }, + "concurrentBuildLimit": 1, + "projectVisibility": "PRIVATE", + "publicProjectAlias": "test", + "resourceAccessRole": "test", + } + ], + "projectsNotFound": ["notFound"], + "ResponseMetadata": { + "HostId": "test", + "RequestId": "test", + "HTTPStatusCode": 404, + "HTTPHeaders": {"test": "test"}, + "RetryAttempts": 1, + }, +} + +edited_environment = { + "type": "WINDOWS_CONTAINER", + "image": "test", + "computeType": "BUILD_GENERAL1_2XLARGE", + "environmentVariables": [], + "privilegedMode": False, + "certificate": "test", + "registryCredential": { + "credential": "test", + "credentialProvider": "SECRETS_MANAGER", + }, + "imagePullCredentialsType": "CODEBUILD", +} diff --git a/source/remediation_runbooks/scripts/test/test_removelambdapublicaccess.py 
b/source/remediation_runbooks/scripts/test/test_removelambdapublicaccess.py index a7d3c323..723fc30a 100644 --- a/source/remediation_runbooks/scripts/test/test_removelambdapublicaccess.py +++ b/source/remediation_runbooks/scripts/test/test_removelambdapublicaccess.py @@ -2,36 +2,32 @@ # SPDX-License-Identifier: Apache-2.0 import boto3 import botocore.session -from botocore.stub import Stubber -from botocore.config import Config -import pytest -from pytest_mock import mocker - import RemoveLambdaPublicAccess as remediation +from botocore.config import Config +from botocore.stub import Stubber my_session = boto3.session.Session() my_region = my_session.region_name + def test_success(mocker): - event = { - 'FunctionName': 'myPublicTestFunction' - } + event = {"FunctionName": "myPublicTestFunction"} get_policy_initial_response = { "ResponseMetadata": { "RequestId": "8a2cc603-ba43-467d-be12-a4f7a28f93bf", "HTTPStatusCode": 200, "HTTPHeaders": { - "date": "Tue, 27 Jul 2021 13:02:30 GMT", - "content-type": "application/json", - "content-length": "341", - "connection": "keep-alive", - "x-amzn-requestid": "8a2cc603-ba43-467d-be12-a4f7a28f93bf" + "date": "Tue, 27 Jul 2021 13:02:30 GMT", + "content-type": "application/json", + "content-length": "341", + "connection": "keep-alive", + "x-amzn-requestid": "8a2cc603-ba43-467d-be12-a4f7a28f93bf", }, - "RetryAttempts": 0 + "RetryAttempts": 0, }, - 'Policy': "{\"Version\":\"2012-10-17\",\"Id\":\"default\",\"Statement\":[{\"Sid\":\"sdfsdf\",\"Effect\":\"Allow\",\"Principal\":{\"Service\":\"events.amazonaws.com\"},\"Action\":\"lambda:InvokeFunction\",\"Resource\":\"arn:aws:lambda:us-east-1:111111111111:function:myPublicTestFunction\"},{\"Sid\":\"SHARRTest\",\"Effect\":\"Allow\",\"Principal\":\"*\",\"Action\":\"lambda:InvokeFunction\",\"Resource\":\"arn:aws:lambda:us-east-1:111111111111:function:myPublicTestFunction\"}]}", - "RevisionId": "43f41078-ecd3-406d-b862-d770019c262c" + "Policy": 
'{"Version":"2012-10-17","Id":"default","Statement":[{"Sid":"sdfsdf","Effect":"Allow","Principal":{"Service":"events.amazonaws.com"},"Action":"lambda:InvokeFunction","Resource":"arn:aws:lambda:us-east-1:111111111111:function:myPublicTestFunction"},{"Sid":"SHARRTest","Effect":"Allow","Principal":"*","Action":"lambda:InvokeFunction","Resource":"arn:aws:lambda:us-east-1:111111111111:function:myPublicTestFunction"}]}', + "RevisionId": "43f41078-ecd3-406d-b862-d770019c262c", } get_policy_after_response = { @@ -39,90 +35,79 @@ def test_success(mocker): "RequestId": "8a2cc603-ba43-467d-be12-a4f7a28f93bf", "HTTPStatusCode": 200, "HTTPHeaders": { - "date": "Tue, 27 Jul 2021 13:02:30 GMT", - "content-type": "application/json", - "content-length": "341", - "connection": "keep-alive", - "x-amzn-requestid": "8a2cc603-ba43-467d-be12-a4f7a28f93bf" + "date": "Tue, 27 Jul 2021 13:02:30 GMT", + "content-type": "application/json", + "content-length": "341", + "connection": "keep-alive", + "x-amzn-requestid": "8a2cc603-ba43-467d-be12-a4f7a28f93bf", }, - "RetryAttempts": 0 + "RetryAttempts": 0, }, - "Policy": "{\"Version\":\"2012-10-17\",\"Id\":\"default\",\"Statement\":[{\"Sid\":\"sdfsdf\",\"Effect\":\"Allow\",\"Principal\":{\"Service\":\"events.amazonaws.com\"},\"Action\":\"lambda:InvokeFunction\",\"Resource\":\"arn:aws:lambda:us-east-1:111111111111:function:myPublicTestFunction\"}]}", - "RevisionId": "43f41078-ecd3-406d-b862-d770019c262c" + "Policy": '{"Version":"2012-10-17","Id":"default","Statement":[{"Sid":"sdfsdf","Effect":"Allow","Principal":{"Service":"events.amazonaws.com"},"Action":"lambda:InvokeFunction","Resource":"arn:aws:lambda:us-east-1:111111111111:function:myPublicTestFunction"}]}', + "RevisionId": "43f41078-ecd3-406d-b862-d770019c262c", } - BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region - ) + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) - ### Clients - lambda_client = 
botocore.session.get_session().create_client('lambda', config=BOTO_CONFIG) + # Clients + lambda_client = botocore.session.get_session().create_client( + "lambda", config=BOTO_CONFIG + ) lambda_stubber = Stubber(lambda_client) lambda_stubber.add_response( - 'get_policy', + "get_policy", get_policy_initial_response, - { - 'FunctionName': 'myPublicTestFunction' - } + {"FunctionName": "myPublicTestFunction"}, ) lambda_stubber.add_response( - 'remove_permission', + "remove_permission", {}, - { - 'FunctionName': 'myPublicTestFunction', - 'StatementId': 'SHARRTest' - } + {"FunctionName": "myPublicTestFunction", "StatementId": "SHARRTest"}, ) lambda_stubber.add_response( - 'get_policy', + "get_policy", get_policy_after_response, - { - 'FunctionName': 'myPublicTestFunction' - } + {"FunctionName": "myPublicTestFunction"}, ) lambda_stubber.add_response( - 'get_policy', + "get_policy", get_policy_after_response, - { - 'FunctionName': 'myPublicTestFunction' - } + {"FunctionName": "myPublicTestFunction"}, ) lambda_stubber.activate() - mocker.patch('RemoveLambdaPublicAccess.connect_to_lambda', return_value=lambda_client) + mocker.patch( + "RemoveLambdaPublicAccess.connect_to_lambda", return_value=lambda_client + ) - assert remediation.remove_lambda_public_access(event, {}) == None + assert remediation.remove_lambda_public_access(event, {}) is None lambda_stubber.deactivate() + def test_success_aws_star(mocker): - event = { - 'FunctionName': 'myPublicTestFunction' - } + event = {"FunctionName": "myPublicTestFunction"} get_policy_initial_response = { "ResponseMetadata": { "RequestId": "8a2cc603-ba43-467d-be12-a4f7a28f93bf", "HTTPStatusCode": 200, "HTTPHeaders": { - "date": "Tue, 27 Jul 2021 13:02:30 GMT", - "content-type": "application/json", - "content-length": "341", - "connection": "keep-alive", - "x-amzn-requestid": "8a2cc603-ba43-467d-be12-a4f7a28f93bf" + "date": "Tue, 27 Jul 2021 13:02:30 GMT", + "content-type": "application/json", + "content-length": "341", + "connection": 
"keep-alive", + "x-amzn-requestid": "8a2cc603-ba43-467d-be12-a4f7a28f93bf", }, - "RetryAttempts": 0 + "RetryAttempts": 0, }, - 'Policy': "{\"Version\":\"2012-10-17\",\"Id\":\"default\",\"Statement\":[{\"Sid\":\"sdfsdf\",\"Effect\":\"Allow\",\"Principal\":{\"Service\":\"events.amazonaws.com\"},\"Action\":\"lambda:InvokeFunction\",\"Resource\":\"arn:aws:lambda:us-east-1:111111111111:function:myPublicTestFunction\"},{\"Sid\":\"SHARRTest\",\"Effect\":\"Allow\",\"Principal\":{\"AWS\":\"*\"},\"Action\":\"lambda:InvokeFunction\",\"Resource\":\"arn:aws:lambda:us-east-1:111111111111:function:myPublicTestFunction\"}]}", - "RevisionId": "43f41078-ecd3-406d-b862-d770019c262c" + "Policy": '{"Version":"2012-10-17","Id":"default","Statement":[{"Sid":"sdfsdf","Effect":"Allow","Principal":{"Service":"events.amazonaws.com"},"Action":"lambda:InvokeFunction","Resource":"arn:aws:lambda:us-east-1:111111111111:function:myPublicTestFunction"},{"Sid":"SHARRTest","Effect":"Allow","Principal":{"AWS":"*"},"Action":"lambda:InvokeFunction","Resource":"arn:aws:lambda:us-east-1:111111111111:function:myPublicTestFunction"}]}', + "RevisionId": "43f41078-ecd3-406d-b862-d770019c262c", } get_policy_after_response = { @@ -130,156 +115,138 @@ def test_success_aws_star(mocker): "RequestId": "8a2cc603-ba43-467d-be12-a4f7a28f93bf", "HTTPStatusCode": 200, "HTTPHeaders": { - "date": "Tue, 27 Jul 2021 13:02:30 GMT", - "content-type": "application/json", - "content-length": "341", - "connection": "keep-alive", - "x-amzn-requestid": "8a2cc603-ba43-467d-be12-a4f7a28f93bf" + "date": "Tue, 27 Jul 2021 13:02:30 GMT", + "content-type": "application/json", + "content-length": "341", + "connection": "keep-alive", + "x-amzn-requestid": "8a2cc603-ba43-467d-be12-a4f7a28f93bf", }, - "RetryAttempts": 0 + "RetryAttempts": 0, }, - "Policy": 
"{\"Version\":\"2012-10-17\",\"Id\":\"default\",\"Statement\":[{\"Sid\":\"sdfsdf\",\"Effect\":\"Allow\",\"Principal\":{\"Service\":\"events.amazonaws.com\"},\"Action\":\"lambda:InvokeFunction\",\"Resource\":\"arn:aws:lambda:us-east-1:111111111111:function:myPublicTestFunction\"}]}", - "RevisionId": "43f41078-ecd3-406d-b862-d770019c262c" + "Policy": '{"Version":"2012-10-17","Id":"default","Statement":[{"Sid":"sdfsdf","Effect":"Allow","Principal":{"Service":"events.amazonaws.com"},"Action":"lambda:InvokeFunction","Resource":"arn:aws:lambda:us-east-1:111111111111:function:myPublicTestFunction"}]}', + "RevisionId": "43f41078-ecd3-406d-b862-d770019c262c", } - BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region - ) + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) - ### Clients - lambda_client = botocore.session.get_session().create_client('lambda', config=BOTO_CONFIG) + # Clients + lambda_client = botocore.session.get_session().create_client( + "lambda", config=BOTO_CONFIG + ) lambda_stubber = Stubber(lambda_client) lambda_stubber.add_response( - 'get_policy', + "get_policy", get_policy_initial_response, - { - 'FunctionName': 'myPublicTestFunction' - } + {"FunctionName": "myPublicTestFunction"}, ) lambda_stubber.add_response( - 'remove_permission', + "remove_permission", {}, - { - 'FunctionName': 'myPublicTestFunction', - 'StatementId': 'SHARRTest' - } + {"FunctionName": "myPublicTestFunction", "StatementId": "SHARRTest"}, ) lambda_stubber.add_response( - 'get_policy', + "get_policy", get_policy_after_response, - { - 'FunctionName': 'myPublicTestFunction' - } + {"FunctionName": "myPublicTestFunction"}, ) lambda_stubber.add_response( - 'get_policy', + "get_policy", get_policy_after_response, - { - 'FunctionName': 'myPublicTestFunction' - } + {"FunctionName": "myPublicTestFunction"}, ) lambda_stubber.activate() - mocker.patch('RemoveLambdaPublicAccess.connect_to_lambda', return_value=lambda_client) + mocker.patch( 
+ "RemoveLambdaPublicAccess.connect_to_lambda", return_value=lambda_client + ) - assert remediation.remove_lambda_public_access(event, {}) == None + assert remediation.remove_lambda_public_access(event, {}) is None lambda_stubber.deactivate() + def test_success_s3_statement(mocker): - event = { - 'FunctionName': 'myPublicS3TestFunction' - } + event = {"FunctionName": "myPublicS3TestFunction"} get_policy_initial_response = { "ResponseMetadata": { "RequestId": "8a2cc603-ba43-467d-be12-a4f7a28f93bf", "HTTPStatusCode": 200, "HTTPHeaders": { - "date": "Tue, 27 Jul 2021 13:02:30 GMT", - "content-type": "application/json", - "content-length": "341", - "connection": "keep-alive", - "x-amzn-requestid": "8a2cc603-ba43-467d-be12-a4f7a28f93bf" + "date": "Tue, 27 Jul 2021 13:02:30 GMT", + "content-type": "application/json", + "content-length": "341", + "connection": "keep-alive", + "x-amzn-requestid": "8a2cc603-ba43-467d-be12-a4f7a28f93bf", }, - "RetryAttempts": 0 + "RetryAttempts": 0, }, - "Policy": "{\"Version\":\"2012-10-17\",\"Id\":\"default\",\"Statement\":[{\"Sid\":\"sdfsdf\",\"Effect\":\"Allow\",\"Principal\":{\"Service\":\"events.amazonaws.com\"},\"Action\":\"lambda:InvokeFunction\",\"Resource\":\"arn:aws:lambda:us-east-1:111111111111:function:myPublicS3TestFunction\"},{\"Sid\": \"lambda-allow-s3-my-function-test\",\"Effect\": \"Allow\",\"Principal\": {\"Service\": \"s3.amazonaws.com\"},\"Action\": \"lambda:InvokeFunction\",\"Resource\":\"arn:aws:lambda:us-east-1:111111111111:function:myPublicS3TestFunction\", \"Condition\": {\"ArnLike\": {\"AWS:SourceArn\": \"arn:aws:s3:::my-bucket\"}},\"RevisionId\":\"43f41078-ecd3-406d-b862-d770019c262c\"}]}" + "Policy": '{"Version":"2012-10-17","Id":"default","Statement":[{"Sid":"sdfsdf","Effect":"Allow","Principal":{"Service":"events.amazonaws.com"},"Action":"lambda:InvokeFunction","Resource":"arn:aws:lambda:us-east-1:111111111111:function:myPublicS3TestFunction"},{"Sid": "lambda-allow-s3-my-function-test","Effect": 
"Allow","Principal": {"Service": "s3.amazonaws.com"},"Action": "lambda:InvokeFunction","Resource":"arn:aws:lambda:us-east-1:111111111111:function:myPublicS3TestFunction", "Condition": {"ArnLike": {"AWS:SourceArn": "arn:aws:s3:::my-bucket"}},"RevisionId":"43f41078-ecd3-406d-b862-d770019c262c"}]}', } - + get_policy_after_response = { "ResponseMetadata": { "RequestId": "8a2cc603-ba43-467d-be12-a4f7a28f93bf", "HTTPStatusCode": 200, "HTTPHeaders": { - "date": "Tue, 27 Jul 2021 13:02:30 GMT", - "content-type": "application/json", - "content-length": "341", - "connection": "keep-alive", - "x-amzn-requestid": "8a2cc603-ba43-467d-be12-a4f7a28f93bf" + "date": "Tue, 27 Jul 2021 13:02:30 GMT", + "content-type": "application/json", + "content-length": "341", + "connection": "keep-alive", + "x-amzn-requestid": "8a2cc603-ba43-467d-be12-a4f7a28f93bf", }, - "RetryAttempts": 0 + "RetryAttempts": 0, }, - "Policy": "{\"Version\":\"2012-10-17\",\"Id\":\"default\",\"Statement\":[{\"Sid\":\"sdfsdf\",\"Effect\":\"Allow\",\"Principal\":{\"Service\":\"events.amazonaws.com\"},\"Action\":\"lambda:InvokeFunction\",\"Resource\":\"arn:aws:lambda:us-east-1:111111111111:function:myPublicS3TestFunction\"}]}", - "RevisionId": "43f41078-ecd3-406d-b862-d770019c262c" + "Policy": '{"Version":"2012-10-17","Id":"default","Statement":[{"Sid":"sdfsdf","Effect":"Allow","Principal":{"Service":"events.amazonaws.com"},"Action":"lambda:InvokeFunction","Resource":"arn:aws:lambda:us-east-1:111111111111:function:myPublicS3TestFunction"}]}', + "RevisionId": "43f41078-ecd3-406d-b862-d770019c262c", } - BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region - ) + BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) - ### Clients - lambda_client = botocore.session.get_session().create_client('lambda', config=BOTO_CONFIG) + # Clients + lambda_client = botocore.session.get_session().create_client( + "lambda", config=BOTO_CONFIG + ) lambda_stubber = Stubber(lambda_client) 
lambda_stubber.add_response( - 'get_policy', + "get_policy", get_policy_initial_response, - { - 'FunctionName': 'myPublicS3TestFunction' - } + {"FunctionName": "myPublicS3TestFunction"}, ) lambda_stubber.add_response( - 'remove_permission', + "remove_permission", {}, { - 'FunctionName': 'myPublicS3TestFunction', - 'StatementId': 'lambda-allow-s3-my-function-test' - } + "FunctionName": "myPublicS3TestFunction", + "StatementId": "lambda-allow-s3-my-function-test", + }, ) lambda_stubber.add_response( - 'get_policy', + "get_policy", get_policy_after_response, - { - 'FunctionName': 'myPublicS3TestFunction' - } + {"FunctionName": "myPublicS3TestFunction"}, ) lambda_stubber.add_response( - 'get_policy', + "get_policy", get_policy_after_response, - { - 'FunctionName': 'myPublicS3TestFunction' - } + {"FunctionName": "myPublicS3TestFunction"}, ) lambda_stubber.activate() - mocker.patch('RemoveLambdaPublicAccess.connect_to_lambda', return_value=lambda_client) + mocker.patch( + "RemoveLambdaPublicAccess.connect_to_lambda", return_value=lambda_client + ) - assert remediation.remove_lambda_public_access(event, {}) == None + assert remediation.remove_lambda_public_access(event, {}) is None lambda_stubber.deactivate() diff --git a/source/remediation_runbooks/scripts/test/test_replacecodebuildcleartextcredentials.py b/source/remediation_runbooks/scripts/test/test_replacecodebuildcleartextcredentials.py index 3148c992..a6cd93a3 100644 --- a/source/remediation_runbooks/scripts/test/test_replacecodebuildcleartextcredentials.py +++ b/source/remediation_runbooks/scripts/test/test_replacecodebuildcleartextcredentials.py @@ -1,189 +1,171 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 from datetime import datetime + import boto3.session import botocore.session -from botocore.stub import Stubber, ANY -from botocore.config import Config import pytest - import ReplaceCodeBuildClearTextCredentials as remediation +from botocore.config import Config +from botocore.stub import ANY, Stubber + def get_region() -> str: my_session = boto3.session.Session() return my_session.region_name + def get_config() -> Config: - return Config( - retries = { - 'mode': 'standard' - }, - region_name = get_region() - ) + return Config(retries={"mode": "standard"}, region_name=get_region()) + class Case: def __init__(self, env_vars): self._env_vars = env_vars - self._project_name = 'invoke-codebuild-2' - self._service_role = f'codebuild-{ self._project_name }-service-role' - self._policy_name = f'CodeBuildSSMParameterPolicy-{ self._project_name }-{ get_region() }' - self._policy_arn = f'arn:aws:iam::111111111111:policy/{ self._policy_name }' + self._project_name = "invoke-codebuild-2" + self._service_role = f"codebuild-{ self._project_name }-service-role" + self._policy_name = ( + f"CodeBuildSSMParameterPolicy-{ self._project_name }-{ get_region() }" + ) + self._policy_arn = f"arn:aws:iam::111111111111:policy/{ self._policy_name }" self._policy_modtime = datetime.now() def event(self): return { - 'ProjectInfo': { - 'name': self._project_name, - 'arn': f'arn:aws:codebuild:{get_region()}:111111111111:project/{ self._project_name }', - 'source': { - 'type': 'NO_SOURCE', - 'gitCloneDepth': 1, - 'buildspec': 'version: 0.2\n\nphases:\n build:\n commands:\n - echo \"Hello world!\"\n', - 'insecureSsl': False - }, - 'secondarySources': [], - 'secondarySourceVersions':[], - 'artifacts': { - 'type': 'NO_ARTIFACTS' - }, - 'secondaryArtifacts': [], - 'cache': { - 'type': 'NO_CACHE' + "ProjectInfo": { + "name": self._project_name, + "arn": f"arn:aws:codebuild:{get_region()}:111111111111:project/{ self._project_name }", + "source": { + "type": 
"NO_SOURCE", + "gitCloneDepth": 1, + "buildspec": 'version: 0.2\n\nphases:\n build:\n commands:\n - echo "Hello world!"\n', + "insecureSsl": False, }, - 'environment': { - 'type': 'ARM_CONTAINER', - 'image': 'aws/codebuild/amazonlinux2-aarch64-standard:2.0', - 'computeType': 'BUILD_GENERAL1_SMALL', - 'environmentVariables': self._env_vars, - 'privilegedMode': False, - 'imagePullCredentialsType': 'CODEBUILD' + "secondarySources": [], + "secondarySourceVersions": [], + "artifacts": {"type": "NO_ARTIFACTS"}, + "secondaryArtifacts": [], + "cache": {"type": "NO_CACHE"}, + "environment": { + "type": "ARM_CONTAINER", + "image": "aws/codebuild/amazonlinux2-aarch64-standard:2.0", + "computeType": "BUILD_GENERAL1_SMALL", + "environmentVariables": self._env_vars, + "privilegedMode": False, + "imagePullCredentialsType": "CODEBUILD", }, - 'serviceRole': f'arn:aws:iam::111111111111:role/service-role/{ self._service_role }', - 'timeoutInMinutes': 60, - 'queuedTimeoutInMinutes': 480, - 'encryptionKey': f'arn:aws:kms:{get_region()}:111111111111:alias/aws/s3', - 'tags': [], - 'created': '2022-01-28T21:59:12.932000+00:00', - 'lastModified': '2022-02-02T19:16:05.722000+00:00', - 'badge': { - 'badgeEnabled': False + "serviceRole": f"arn:aws:iam::111111111111:role/service-role/{ self._service_role }", + "timeoutInMinutes": 60, + "queuedTimeoutInMinutes": 480, + "encryptionKey": f"arn:aws:kms:{get_region()}:111111111111:alias/aws/s3", + "tags": [], + "created": "2022-01-28T21:59:12.932000+00:00", + "lastModified": "2022-02-02T19:16:05.722000+00:00", + "badge": {"badgeEnabled": False}, + "logsConfig": { + "cloudWatchLogs": {"status": "DISABLED"}, + "s3Logs": {"status": "DISABLED", "encryptionDisabled": False}, }, - 'logsConfig': { - 'cloudWatchLogs': { - 'status': 'DISABLED' - }, - 's3Logs': { - 'status': 'DISABLED', - 'encryptionDisabled': False - } - }, - 'fileSystemLocations': [], - 'projectVisibility': 'PRIVATE' + "fileSystemLocations": [], + "projectVisibility": "PRIVATE", } } def 
parameter_name(self, env_var_name): - return f'{ remediation.get_project_ssm_namespace(self._project_name) }/env/{ env_var_name }' + return f"{ remediation.get_project_ssm_namespace(self._project_name) }/env/{ env_var_name }" def policy(self): return { - 'Policy': { - 'PolicyName': self._policy_name, - 'PolicyId': '1234567812345678', - 'Arn': self._policy_arn, - 'Path': '/', - 'DefaultVersionId': '', - 'AttachmentCount': 0, - 'PermissionsBoundaryUsageCount': 0, - 'IsAttachable': True, - 'Description': '', - 'CreateDate': self._policy_modtime, - 'UpdateDate': self._policy_modtime, - 'Tags': [] + "Policy": { + "PolicyName": self._policy_name, + "PolicyId": "1234567812345678", + "Arn": self._policy_arn, + "Path": "/", + "DefaultVersionId": "", + "AttachmentCount": 0, + "PermissionsBoundaryUsageCount": 0, + "IsAttachable": True, + "Description": "", + "CreateDate": self._policy_modtime, + "UpdateDate": self._policy_modtime, + "Tags": [], } } def policy_serialized(self): policy = self.policy() - policy['Policy']['CreateDate'] = policy['Policy']['CreateDate'].isoformat() - policy['Policy']['UpdateDate'] = policy['Policy']['UpdateDate'].isoformat() + policy["Policy"]["CreateDate"] = policy["Policy"]["CreateDate"].isoformat() + policy["Policy"]["UpdateDate"] = policy["Policy"]["UpdateDate"].isoformat() return policy def attach_params(self): - return { - 'PolicyArn': self._policy_arn, - 'RoleName': self._service_role - } + return {"PolicyArn": self._policy_arn, "RoleName": self._service_role} + def successful_parameter_response(): - return { - 'Tier': 'Standard', - 'Version': 1 - } + return {"Tier": "Standard", "Version": 1} + def test_success(mocker): env_vars = [ - { - 'name': 'AWS_ACCESS_KEY_ID', - 'value': 'test_value', - 'type': 'PLAINTEXT' - } + {"name": "AWS_ACCESS_KEY_ID", "value": "test_value", "type": "PLAINTEXT"} ] test_case = Case(env_vars) expected_env_vars = [ { - 'name': 'AWS_ACCESS_KEY_ID', - 'type': 'PARAMETER_STORE', - 'value': 
test_case.parameter_name(env_vars[0]['name']) + "name": "AWS_ACCESS_KEY_ID", + "type": "PARAMETER_STORE", + "value": test_case.parameter_name(env_vars[0]["name"]), } ] - ssm_client = botocore.session.get_session().create_client('ssm', config = get_config()) + ssm_client = botocore.session.get_session().create_client( + "ssm", config=get_config() + ) ssm_stubber = Stubber(ssm_client) ssm_stubber.add_response( - 'put_parameter', + "put_parameter", successful_parameter_response(), { - 'Name': test_case.parameter_name(env_vars[0]['name']), - 'Description': ANY, - 'Value': env_vars[0]['value'], - 'Type': 'SecureString', - 'Overwrite': False, - 'DataType': 'text' - } + "Name": test_case.parameter_name(env_vars[0]["name"]), + "Description": ANY, + "Value": env_vars[0]["value"], + "Type": "SecureString", + "Overwrite": False, + "DataType": "text", + }, ) ssm_stubber.activate() - iam_client = botocore.session.get_session().create_client('iam', config=get_config()) + iam_client = botocore.session.get_session().create_client( + "iam", config=get_config() + ) iam_stubber = Stubber(iam_client) - iam_stubber.add_response( - 'create_policy', - test_case.policy() - ) + iam_stubber.add_response("create_policy", test_case.policy()) - iam_stubber.add_response( - 'attach_role_policy', - {}, - test_case.attach_params() - ) + iam_stubber.add_response("attach_role_policy", {}, test_case.attach_params()) iam_stubber.activate() - mocker.patch('ReplaceCodeBuildClearTextCredentials.connect_to_ssm', return_value = ssm_client) - mocker.patch('ReplaceCodeBuildClearTextCredentials.connect_to_iam', return_value = iam_client) + mocker.patch( + "ReplaceCodeBuildClearTextCredentials.connect_to_ssm", return_value=ssm_client + ) + mocker.patch( + "ReplaceCodeBuildClearTextCredentials.connect_to_iam", return_value=iam_client + ) - project_env = test_case.event()['ProjectInfo']['environment'] - project_env['environmentVariables'] = expected_env_vars + project_env = 
test_case.event()["ProjectInfo"]["environment"] + project_env["environmentVariables"] = expected_env_vars successful_response = { - 'AttachResponse': {}, - 'Parameters': [successful_parameter_response()], - 'Policy': test_case.policy_serialized(), - 'UpdatedProjectEnv': project_env + "AttachResponse": {}, + "Parameters": [successful_parameter_response()], + "Policy": test_case.policy_serialized(), + "UpdatedProjectEnv": project_env, } assert remediation.replace_credentials(test_case.event(), {}) == successful_response @@ -191,90 +173,84 @@ def test_success(mocker): ssm_stubber.deactivate() iam_stubber.deactivate() + def test_multiple_params(mocker): env_vars = [ + {"name": "AWS_ACCESS_KEY_ID", "value": "test_value", "type": "PLAINTEXT"}, + {"name": "AWS_SECRET_ACCESS_KEY", "value": "test_value_2", "type": "PLAINTEXT"}, { - 'name': 'AWS_ACCESS_KEY_ID', - 'value': 'test_value', - 'type': 'PLAINTEXT' + "name": "AN_ACCEPTABLE_PARAMETER", + "value": "test_value_3", + "type": "PLAINTEXT", }, - { - 'name': 'AWS_SECRET_ACCESS_KEY', - 'value': 'test_value_2', - 'type': 'PLAINTEXT' - }, - { - 'name': 'AN_ACCEPTABLE_PARAMETER', - 'value': 'test_value_3', - 'type': 'PLAINTEXT' - } ] test_case = Case(env_vars) expected_env_vars = [ { - 'name': 'AWS_ACCESS_KEY_ID', - 'type': 'PARAMETER_STORE', - 'value': test_case.parameter_name(env_vars[0]['name']) + "name": "AWS_ACCESS_KEY_ID", + "type": "PARAMETER_STORE", + "value": test_case.parameter_name(env_vars[0]["name"]), }, { - 'name': 'AWS_SECRET_ACCESS_KEY', - 'type': 'PARAMETER_STORE', - 'value': test_case.parameter_name(env_vars[1]['name']) + "name": "AWS_SECRET_ACCESS_KEY", + "type": "PARAMETER_STORE", + "value": test_case.parameter_name(env_vars[1]["name"]), }, { - 'name': 'AN_ACCEPTABLE_PARAMETER', - 'value': 'test_value_3', - 'type': 'PLAINTEXT' - } + "name": "AN_ACCEPTABLE_PARAMETER", + "value": "test_value_3", + "type": "PLAINTEXT", + }, ] - ssm_client = botocore.session.get_session().create_client('ssm', config = 
get_config()) + ssm_client = botocore.session.get_session().create_client( + "ssm", config=get_config() + ) ssm_stubber = Stubber(ssm_client) for env_var in env_vars[0:2]: ssm_stubber.add_response( - 'put_parameter', + "put_parameter", successful_parameter_response(), { - 'Name': test_case.parameter_name(env_var['name']), - 'Description': ANY, - 'Value': env_var['value'], - 'Type': 'SecureString', - 'Overwrite': False, - 'DataType': 'text' - } + "Name": test_case.parameter_name(env_var["name"]), + "Description": ANY, + "Value": env_var["value"], + "Type": "SecureString", + "Overwrite": False, + "DataType": "text", + }, ) ssm_stubber.activate() - iam_client = botocore.session.get_session().create_client('iam', config=get_config()) + iam_client = botocore.session.get_session().create_client( + "iam", config=get_config() + ) iam_stubber = Stubber(iam_client) - iam_stubber.add_response( - 'create_policy', - test_case.policy() - ) + iam_stubber.add_response("create_policy", test_case.policy()) - iam_stubber.add_response( - 'attach_role_policy', - {}, - test_case.attach_params() - ) + iam_stubber.add_response("attach_role_policy", {}, test_case.attach_params()) iam_stubber.activate() - mocker.patch('ReplaceCodeBuildClearTextCredentials.connect_to_ssm', return_value = ssm_client) - mocker.patch('ReplaceCodeBuildClearTextCredentials.connect_to_iam', return_value = iam_client) + mocker.patch( + "ReplaceCodeBuildClearTextCredentials.connect_to_ssm", return_value=ssm_client + ) + mocker.patch( + "ReplaceCodeBuildClearTextCredentials.connect_to_iam", return_value=iam_client + ) - project_env = test_case.event()['ProjectInfo']['environment'] - project_env['environmentVariables'] = expected_env_vars + project_env = test_case.event()["ProjectInfo"]["environment"] + project_env["environmentVariables"] = expected_env_vars successful_response = { - 'AttachResponse': {}, - 'Parameters': [successful_parameter_response()] * 2, - 'Policy': test_case.policy_serialized(), - 
'UpdatedProjectEnv': project_env + "AttachResponse": {}, + "Parameters": [successful_parameter_response()] * 2, + "Policy": test_case.policy_serialized(), + "UpdatedProjectEnv": project_env, } assert remediation.replace_credentials(test_case.event(), {}) == successful_response @@ -282,69 +258,67 @@ def test_multiple_params(mocker): ssm_stubber.deactivate() iam_stubber.deactivate() + def test_param_exists(mocker): env_vars = [ - { - 'name': 'AWS_ACCESS_KEY_ID', - 'value': 'test_value', - 'type': 'PLAINTEXT' - } + {"name": "AWS_ACCESS_KEY_ID", "value": "test_value", "type": "PLAINTEXT"} ] test_case = Case(env_vars) expected_env_vars = [ { - 'name': 'AWS_ACCESS_KEY_ID', - 'type': 'PARAMETER_STORE', - 'value': test_case.parameter_name(env_vars[0]['name']) + "name": "AWS_ACCESS_KEY_ID", + "type": "PARAMETER_STORE", + "value": test_case.parameter_name(env_vars[0]["name"]), } ] - ssm_client = botocore.session.get_session().create_client('ssm', config = get_config()) + ssm_client = botocore.session.get_session().create_client( + "ssm", config=get_config() + ) ssm_stubber = Stubber(ssm_client) ssm_stubber.add_client_error( - 'put_parameter', - 'ParameterAlreadyExists', - expected_params = { - 'Name': test_case.parameter_name(env_vars[0]['name']), - 'Description': ANY, - 'Value': env_vars[0]['value'], - 'Type': 'SecureString', - 'Overwrite': False, - 'DataType': 'text' - } + "put_parameter", + "ParameterAlreadyExists", + expected_params={ + "Name": test_case.parameter_name(env_vars[0]["name"]), + "Description": ANY, + "Value": env_vars[0]["value"], + "Type": "SecureString", + "Overwrite": False, + "DataType": "text", + }, ) ssm_stubber.activate() - iam_client = botocore.session.get_session().create_client('iam', config=get_config()) + iam_client = botocore.session.get_session().create_client( + "iam", config=get_config() + ) iam_stubber = Stubber(iam_client) - iam_stubber.add_response( - 'create_policy', - test_case.policy() - ) + iam_stubber.add_response("create_policy", 
test_case.policy()) - iam_stubber.add_response( - 'attach_role_policy', - {}, - test_case.attach_params() - ) + iam_stubber.add_response("attach_role_policy", {}, test_case.attach_params()) iam_stubber.activate() - mocker.patch('ReplaceCodeBuildClearTextCredentials.connect_to_ssm', return_value = ssm_client) - mocker.patch('ReplaceCodeBuildClearTextCredentials.connect_to_iam', return_value = iam_client) + mocker.patch( + "ReplaceCodeBuildClearTextCredentials.connect_to_ssm", return_value=ssm_client + ) + mocker.patch( + "ReplaceCodeBuildClearTextCredentials.connect_to_iam", return_value=iam_client + ) - project_env = test_case.event()['ProjectInfo']['environment'] - project_env['environmentVariables'] = expected_env_vars + project_env = test_case.event()["ProjectInfo"]["environment"] + project_env["environmentVariables"] = expected_env_vars successful_response = { - 'AttachResponse': {}, - 'Parameters': [None], - 'Policy': test_case.policy_serialized(), - 'UpdatedProjectEnv': project_env + "AttachResponse": {}, + "Parameters": [None], + "Policy": test_case.policy_serialized(), + "UpdatedProjectEnv": project_env, } assert remediation.replace_credentials(test_case.event(), {}) == successful_response @@ -352,73 +326,67 @@ def test_param_exists(mocker): ssm_stubber.deactivate() iam_stubber.deactivate() + def test_policy_exists(mocker): env_vars = [ - { - 'name': 'AWS_ACCESS_KEY_ID', - 'value': 'test_value', - 'type': 'PLAINTEXT' - } + {"name": "AWS_ACCESS_KEY_ID", "value": "test_value", "type": "PLAINTEXT"} ] test_case = Case(env_vars) expected_env_vars = [ { - 'name': 'AWS_ACCESS_KEY_ID', - 'type': 'PARAMETER_STORE', - 'value': test_case.parameter_name(env_vars[0]['name']) + "name": "AWS_ACCESS_KEY_ID", + "type": "PARAMETER_STORE", + "value": test_case.parameter_name(env_vars[0]["name"]), } ] - ssm_client = botocore.session.get_session().create_client('ssm', config = get_config()) + ssm_client = botocore.session.get_session().create_client( + "ssm", 
config=get_config() + ) ssm_stubber = Stubber(ssm_client) ssm_stubber.add_response( - 'put_parameter', + "put_parameter", successful_parameter_response(), { - 'Name': test_case.parameter_name(env_vars[0]['name']), - 'Description': ANY, - 'Value': env_vars[0]['value'], - 'Type': 'SecureString', - 'Overwrite': False, - 'DataType': 'text' - } + "Name": test_case.parameter_name(env_vars[0]["name"]), + "Description": ANY, + "Value": env_vars[0]["value"], + "Type": "SecureString", + "Overwrite": False, + "DataType": "text", + }, ) ssm_stubber.activate() - iam_client = botocore.session.get_session().create_client('iam', config=get_config()) + iam_client = botocore.session.get_session().create_client( + "iam", config=get_config() + ) iam_stubber = Stubber(iam_client) - iam_stubber.add_client_error( - 'create_policy', - 'EntityAlreadyExists' - ) + iam_stubber.add_client_error("create_policy", "EntityAlreadyExists") - iam_stubber.add_response( - 'attach_role_policy', - {}, - test_case.attach_params() - ) + iam_stubber.add_response("attach_role_policy", {}, test_case.attach_params()) iam_stubber.activate() - mocker.patch('ReplaceCodeBuildClearTextCredentials.connect_to_ssm', return_value = ssm_client) - mocker.patch('ReplaceCodeBuildClearTextCredentials.connect_to_iam', return_value = iam_client) + mocker.patch( + "ReplaceCodeBuildClearTextCredentials.connect_to_ssm", return_value=ssm_client + ) + mocker.patch( + "ReplaceCodeBuildClearTextCredentials.connect_to_iam", return_value=iam_client + ) - project_env = test_case.event()['ProjectInfo']['environment'] - project_env['environmentVariables'] = expected_env_vars + project_env = test_case.event()["ProjectInfo"]["environment"] + project_env["environmentVariables"] = expected_env_vars successful_response = { - 'AttachResponse': {}, - 'Parameters': [successful_parameter_response()], - 'Policy': { - 'Policy': { - 'Arn': test_case.policy_serialized()['Policy']['Arn'] - } - }, - 'UpdatedProjectEnv': project_env + "AttachResponse": 
{}, + "Parameters": [successful_parameter_response()], + "Policy": {"Policy": {"Arn": test_case.policy_serialized()["Policy"]["Arn"]}}, + "UpdatedProjectEnv": project_env, } assert remediation.replace_credentials(test_case.event(), {}) == successful_response @@ -426,79 +394,77 @@ def test_policy_exists(mocker): ssm_stubber.deactivate() iam_stubber.deactivate() + def test_new_param(mocker): env_vars = [ { - 'name': 'AWS_ACCESS_KEY_ID', - 'value': 'an_existing_parameter', - 'type': 'PARAMETER_STORE' + "name": "AWS_ACCESS_KEY_ID", + "value": "an_existing_parameter", + "type": "PARAMETER_STORE", }, - { - 'name': 'AWS_SECRET_ACCESS_KEY', - 'value': 'test_value_2', - 'type': 'PLAINTEXT' - } + {"name": "AWS_SECRET_ACCESS_KEY", "value": "test_value_2", "type": "PLAINTEXT"}, ] test_case = Case(env_vars) expected_env_vars = [ { - 'name': 'AWS_ACCESS_KEY_ID', - 'type': 'PARAMETER_STORE', - 'value': 'an_existing_parameter' + "name": "AWS_ACCESS_KEY_ID", + "type": "PARAMETER_STORE", + "value": "an_existing_parameter", }, { - 'name': 'AWS_SECRET_ACCESS_KEY', - 'type': 'PARAMETER_STORE', - 'value': test_case.parameter_name(env_vars[1]['name']) - } + "name": "AWS_SECRET_ACCESS_KEY", + "type": "PARAMETER_STORE", + "value": test_case.parameter_name(env_vars[1]["name"]), + }, ] - ssm_client = botocore.session.get_session().create_client('ssm', config = get_config()) + ssm_client = botocore.session.get_session().create_client( + "ssm", config=get_config() + ) ssm_stubber = Stubber(ssm_client) ssm_stubber.add_response( - 'put_parameter', + "put_parameter", successful_parameter_response(), { - 'Name': test_case.parameter_name(env_vars[1]['name']), - 'Description': ANY, - 'Value': env_vars[1]['value'], - 'Type': 'SecureString', - 'Overwrite': False, - 'DataType': 'text' - } + "Name": test_case.parameter_name(env_vars[1]["name"]), + "Description": ANY, + "Value": env_vars[1]["value"], + "Type": "SecureString", + "Overwrite": False, + "DataType": "text", + }, ) ssm_stubber.activate() - 
iam_client = botocore.session.get_session().create_client('iam', config=get_config()) + iam_client = botocore.session.get_session().create_client( + "iam", config=get_config() + ) iam_stubber = Stubber(iam_client) - iam_stubber.add_response( - 'create_policy', - test_case.policy() - ) + iam_stubber.add_response("create_policy", test_case.policy()) - iam_stubber.add_response( - 'attach_role_policy', - {}, - test_case.attach_params() - ) + iam_stubber.add_response("attach_role_policy", {}, test_case.attach_params()) iam_stubber.activate() - mocker.patch('ReplaceCodeBuildClearTextCredentials.connect_to_ssm', return_value = ssm_client) - mocker.patch('ReplaceCodeBuildClearTextCredentials.connect_to_iam', return_value = iam_client) + mocker.patch( + "ReplaceCodeBuildClearTextCredentials.connect_to_ssm", return_value=ssm_client + ) + mocker.patch( + "ReplaceCodeBuildClearTextCredentials.connect_to_iam", return_value=iam_client + ) - project_env = test_case.event()['ProjectInfo']['environment'] - project_env['environmentVariables'] = expected_env_vars + project_env = test_case.event()["ProjectInfo"]["environment"] + project_env["environmentVariables"] = expected_env_vars successful_response = { - 'AttachResponse': {}, - 'Parameters': [successful_parameter_response()], - 'Policy': test_case.policy_serialized(), - 'UpdatedProjectEnv': project_env + "AttachResponse": {}, + "Parameters": [successful_parameter_response()], + "Policy": test_case.policy_serialized(), + "UpdatedProjectEnv": project_env, } assert remediation.replace_credentials(test_case.event(), {}) == successful_response @@ -506,38 +472,41 @@ def test_new_param(mocker): ssm_stubber.deactivate() iam_stubber.deactivate() + def test_put_parameter_fails(mocker): env_vars = [ - { - 'name': 'AWS_ACCESS_KEY_ID', - 'value': 'test_value', - 'type': 'PLAINTEXT' - } + {"name": "AWS_ACCESS_KEY_ID", "value": "test_value", "type": "PLAINTEXT"} ] test_case = Case(env_vars) - ssm_client = 
botocore.session.get_session().create_client('ssm', config = get_config()) + ssm_client = botocore.session.get_session().create_client( + "ssm", config=get_config() + ) ssm_stubber = Stubber(ssm_client) ssm_stubber.add_client_error( - 'put_parameter', - ' InternalServerError', - http_status_code = 500, - expected_params = { - 'Name': test_case.parameter_name(env_vars[0]['name']), - 'Description': ANY, - 'Value': env_vars[0]['value'], - 'Type': 'SecureString', - 'Overwrite': False, - 'DataType': 'text' - } + "put_parameter", + " InternalServerError", + http_status_code=500, + expected_params={ + "Name": test_case.parameter_name(env_vars[0]["name"]), + "Description": ANY, + "Value": env_vars[0]["value"], + "Type": "SecureString", + "Overwrite": False, + "DataType": "text", + }, ) ssm_stubber.activate() - mocker.patch('ReplaceCodeBuildClearTextCredentials.connect_to_ssm', return_value = ssm_client) - mocker.patch('ReplaceCodeBuildClearTextCredentials.connect_to_iam', return_value = None) + mocker.patch( + "ReplaceCodeBuildClearTextCredentials.connect_to_ssm", return_value=ssm_client + ) + mocker.patch( + "ReplaceCodeBuildClearTextCredentials.connect_to_iam", return_value=None + ) with pytest.raises(SystemExit) as wrapped_exception: remediation.replace_credentials(test_case.event(), {}) @@ -545,56 +514,51 @@ def test_put_parameter_fails(mocker): ssm_stubber.deactivate() + def test_create_policy_fails(mocker): env_vars = [ - { - 'name': 'AWS_ACCESS_KEY_ID', - 'value': 'test_value', - 'type': 'PLAINTEXT' - } + {"name": "AWS_ACCESS_KEY_ID", "value": "test_value", "type": "PLAINTEXT"} ] test_case = Case(env_vars) - expected_env_vars = [ - { - 'name': 'AWS_ACCESS_KEY_ID', - 'type': 'PARAMETER_STORE', - 'value': test_case.parameter_name(env_vars[0]['name']) - } - ] - - ssm_client = botocore.session.get_session().create_client('ssm', config = get_config()) + ssm_client = botocore.session.get_session().create_client( + "ssm", config=get_config() + ) ssm_stubber = 
Stubber(ssm_client) ssm_stubber.add_response( - 'put_parameter', + "put_parameter", successful_parameter_response(), { - 'Name': test_case.parameter_name(env_vars[0]['name']), - 'Description': ANY, - 'Value': env_vars[0]['value'], - 'Type': 'SecureString', - 'Overwrite': False, - 'DataType': 'text' - } + "Name": test_case.parameter_name(env_vars[0]["name"]), + "Description": ANY, + "Value": env_vars[0]["value"], + "Type": "SecureString", + "Overwrite": False, + "DataType": "text", + }, ) ssm_stubber.activate() - iam_client = botocore.session.get_session().create_client('iam', config=get_config()) + iam_client = botocore.session.get_session().create_client( + "iam", config=get_config() + ) iam_stubber = Stubber(iam_client) iam_stubber.add_client_error( - 'create_policy', - ' ServiceFailure', - http_status_code = 500 + "create_policy", " ServiceFailure", http_status_code=500 ) iam_stubber.activate() - mocker.patch('ReplaceCodeBuildClearTextCredentials.connect_to_ssm', return_value = ssm_client) - mocker.patch('ReplaceCodeBuildClearTextCredentials.connect_to_iam', return_value = iam_client) + mocker.patch( + "ReplaceCodeBuildClearTextCredentials.connect_to_ssm", return_value=ssm_client + ) + mocker.patch( + "ReplaceCodeBuildClearTextCredentials.connect_to_iam", return_value=iam_client + ) with pytest.raises(SystemExit) as wrapped_exception: remediation.replace_credentials(test_case.event(), {}) @@ -603,62 +567,56 @@ def test_create_policy_fails(mocker): ssm_stubber.deactivate() iam_stubber.deactivate() + def test_attach_policy_fails(mocker): env_vars = [ - { - 'name': 'AWS_ACCESS_KEY_ID', - 'value': 'test_value', - 'type': 'PLAINTEXT' - } + {"name": "AWS_ACCESS_KEY_ID", "value": "test_value", "type": "PLAINTEXT"} ] test_case = Case(env_vars) - expected_env_vars = [ - { - 'name': 'AWS_ACCESS_KEY_ID', - 'type': 'PARAMETER_STORE', - 'value': test_case.parameter_name(env_vars[0]['name']) - } - ] - - ssm_client = botocore.session.get_session().create_client('ssm', config = 
get_config()) + ssm_client = botocore.session.get_session().create_client( + "ssm", config=get_config() + ) ssm_stubber = Stubber(ssm_client) ssm_stubber.add_response( - 'put_parameter', + "put_parameter", successful_parameter_response(), { - 'Name': test_case.parameter_name(env_vars[0]['name']), - 'Description': ANY, - 'Value': env_vars[0]['value'], - 'Type': 'SecureString', - 'Overwrite': False, - 'DataType': 'text' - } + "Name": test_case.parameter_name(env_vars[0]["name"]), + "Description": ANY, + "Value": env_vars[0]["value"], + "Type": "SecureString", + "Overwrite": False, + "DataType": "text", + }, ) ssm_stubber.activate() - iam_client = botocore.session.get_session().create_client('iam', config=get_config()) + iam_client = botocore.session.get_session().create_client( + "iam", config=get_config() + ) iam_stubber = Stubber(iam_client) - iam_stubber.add_response( - 'create_policy', - test_case.policy() - ) + iam_stubber.add_response("create_policy", test_case.policy()) iam_stubber.add_client_error( - 'attach_role_policy', - 'ServiceFailure', - http_status_code = 500, - expected_params = test_case.attach_params() + "attach_role_policy", + "ServiceFailure", + http_status_code=500, + expected_params=test_case.attach_params(), ) iam_stubber.activate() - mocker.patch('ReplaceCodeBuildClearTextCredentials.connect_to_ssm', return_value = ssm_client) - mocker.patch('ReplaceCodeBuildClearTextCredentials.connect_to_iam', return_value = iam_client) + mocker.patch( + "ReplaceCodeBuildClearTextCredentials.connect_to_ssm", return_value=ssm_client + ) + mocker.patch( + "ReplaceCodeBuildClearTextCredentials.connect_to_iam", return_value=iam_client + ) with pytest.raises(SystemExit) as wrapped_exception: remediation.replace_credentials(test_case.event(), {}) diff --git a/source/remediation_runbooks/scripts/test/test_revokeunrotatedkeys.py b/source/remediation_runbooks/scripts/test/test_revokeunrotatedkeys.py index 1a823c10..ca7105ac 100644 --- 
a/source/remediation_runbooks/scripts/test/test_revokeunrotatedkeys.py +++ b/source/remediation_runbooks/scripts/test/test_revokeunrotatedkeys.py @@ -1,45 +1,41 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import boto3 -import botocore.session -from botocore.stub import Stubber -from botocore.config import Config -import pytest -from pytest_mock import mocker from datetime import datetime, timezone +import boto3 +import botocore.session import RevokeUnrotatedKeys as remediation +from botocore.config import Config +from botocore.stub import Stubber my_session = boto3.session.Session() my_region = my_session.region_name -BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region -) +BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) + def str_time_to_datetime(dt_str): - dt_obj = datetime.strptime(dt_str, '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=timezone.utc) + dt_obj = datetime.strptime(dt_str, "%Y-%m-%dT%H:%M:%SZ").replace( + tzinfo=timezone.utc + ) return dt_obj + def iam_resource(): return { "resourceIdentifiers": [ { "resourceType": "AWS::IAM::User", "resourceId": "AIDACKCEVSQ6C2EXAMPLE", - "resourceName": "someuser" + "resourceName": "someuser", } ] } + def event(): - return { - "IAMResourceId": "AIDACKCEVSQ6C2EXAMPLE", - "MaxCredentialUsageAge": "90" - } + return {"IAMResourceId": "AIDACKCEVSQ6C2EXAMPLE", "MaxCredentialUsageAge": "90"} + def access_keys(): return { @@ -48,23 +44,24 @@ def access_keys(): "UserName": "someuser", "Status": "Active", "CreateDate": str_time_to_datetime("2015-05-22T14:43:16Z"), - "AccessKeyId": "AKIAIOSFODNN7EXAMPLE" + "AccessKeyId": "AKIAIOSFODNN7EXAMPLE", }, { "UserName": "someuser", "Status": "Active", "CreateDate": datetime.now(timezone.utc), - "AccessKeyId": "AKIAI44QH8DHBEXAMPLE" + "AccessKeyId": "AKIAI44QH8DHBEXAMPLE", }, { "UserName": "someuser", "Status": "Inactive", "CreateDate": 
str_time_to_datetime("2017-10-15T15:20:04Z"), - "AccessKeyId": "AKIAI44QH8DHBEXAMPLE" - } + "AccessKeyId": "AKIAI44QH8DHBEXAMPLE", + }, ] } + def updated_keys(): return { "AccessKeyMetadata": [ @@ -72,23 +69,24 @@ def updated_keys(): "UserName": "someuser", "Status": "Inactive", "CreateDate": str_time_to_datetime("2015-05-22T14:43:16Z"), - "AccessKeyId": "AKIAIOSFODNN7EXAMPLE" + "AccessKeyId": "AKIAIOSFODNN7EXAMPLE", }, { "UserName": "someuser", "Status": "Active", "CreateDate": datetime.now(timezone.utc), - "AccessKeyId": "AKIAI44QH8DHBEXAMPLE" + "AccessKeyId": "AKIAI44QH8DHBEXAMPLE", }, { "UserName": "someuser", "Status": "Inactive", "CreateDate": str_time_to_datetime("2017-10-15T15:20:04Z"), - "AccessKeyId": "AKIAI44QH8DHBEXAMPLE" - } + "AccessKeyId": "AKIAI44QH8DHBEXAMPLE", + }, ] } + def last_accessed_key(id): return { "AKIAIOSFODNN7EXAMPLE": { @@ -96,109 +94,91 @@ def last_accessed_key(id): "AccessKeyLastUsed": { "Region": "N/A", "ServiceName": "s3", - "LastUsedDate": str_time_to_datetime("2016-03-23T19:55:00Z") - } + "LastUsedDate": str_time_to_datetime("2016-03-23T19:55:00Z"), + }, }, "AKIAI44QH8DHBEXAMPLE": { "UserName": "someuser", "AccessKeyLastUsed": { "Region": "N/A", "ServiceName": "s3", - "LastUsedDate": datetime.now(timezone.utc) - } - } + "LastUsedDate": datetime.now(timezone.utc), + }, + }, }[id] + def successful(): return { - 'http_responses': { - 'DeactivateUnusedKeysResponse': [ + "http_responses": { + "DeactivateUnusedKeysResponse": [ { - 'AccessKeyId': 'AKIAIOSFODNN7EXAMPLE', - 'Response': { - 'ResponseMetadata': { - 'AccessKeyId': 'AKIAIOSFODNN7EXAMPLE' - } - } + "AccessKeyId": "AKIAIOSFODNN7EXAMPLE", + "Response": { + "ResponseMetadata": {"AccessKeyId": "AKIAIOSFODNN7EXAMPLE"} + }, } ] }, - 'output': 'Verification of unrotated access keys is successful.' 
+ "output": "Verification of unrotated access keys is successful.", } -#===================================================================================== + +# ===================================================================================== # SUCCESS -#===================================================================================== +# ===================================================================================== def test_success(mocker): - - ### Clients - cfg_client = botocore.session.get_session().create_client('config', config=BOTO_CONFIG) + # Clients + cfg_client = botocore.session.get_session().create_client( + "config", config=BOTO_CONFIG + ) cfg_stubber = Stubber(cfg_client) cfg_stubber.add_response( - 'list_discovered_resources', + "list_discovered_resources", iam_resource(), - { - 'resourceType': 'AWS::IAM::User', - 'resourceIds': ['AIDACKCEVSQ6C2EXAMPLE'] - } + {"resourceType": "AWS::IAM::User", "resourceIds": ["AIDACKCEVSQ6C2EXAMPLE"]}, ) cfg_stubber.activate() - iam_client = botocore.session.get_session().create_client('iam', config=BOTO_CONFIG) + iam_client = botocore.session.get_session().create_client("iam", config=BOTO_CONFIG) iam_stubber = Stubber(iam_client) iam_stubber.add_response( - 'list_access_keys', - access_keys(), - { - 'UserName': 'someuser' - } + "list_access_keys", access_keys(), {"UserName": "someuser"} ) iam_stubber.add_response( - 'get_access_key_last_used', + "get_access_key_last_used", last_accessed_key("AKIAIOSFODNN7EXAMPLE"), - { - 'AccessKeyId': 'AKIAIOSFODNN7EXAMPLE' - } + {"AccessKeyId": "AKIAIOSFODNN7EXAMPLE"}, ) iam_stubber.add_response( - 'update_access_key', + "update_access_key", + {"ResponseMetadata": {"AccessKeyId": "AKIAIOSFODNN7EXAMPLE"}}, { - "ResponseMetadata": { - "AccessKeyId": "AKIAIOSFODNN7EXAMPLE" - } + "AccessKeyId": "AKIAIOSFODNN7EXAMPLE", + "UserName": "someuser", + "Status": "Inactive", }, - { - 'AccessKeyId': 'AKIAIOSFODNN7EXAMPLE', - 'UserName': 'someuser', - 'Status': 'Inactive' - } ) 
iam_stubber.add_response( - 'get_access_key_last_used', + "get_access_key_last_used", last_accessed_key("AKIAI44QH8DHBEXAMPLE"), - { - 'AccessKeyId': 'AKIAI44QH8DHBEXAMPLE' - } + {"AccessKeyId": "AKIAI44QH8DHBEXAMPLE"}, ) iam_stubber.add_response( - 'list_access_keys', - updated_keys(), - { - 'UserName': 'someuser' - } + "list_access_keys", updated_keys(), {"UserName": "someuser"} ) iam_stubber.activate() - mocker.patch('RevokeUnrotatedKeys.connect_to_config', return_value=cfg_client) - mocker.patch('RevokeUnrotatedKeys.connect_to_iam', return_value=iam_client) + mocker.patch("RevokeUnrotatedKeys.connect_to_config", return_value=cfg_client) + mocker.patch("RevokeUnrotatedKeys.connect_to_iam", return_value=iam_client) assert remediation.unrotated_key_handler(event(), {}) == successful() diff --git a/source/remediation_runbooks/scripts/test/test_s3SslOnlyBucketPolicy.py b/source/remediation_runbooks/scripts/test/test_s3SslOnlyBucketPolicy.py index 5f042599..e19f7be5 100644 --- a/source/remediation_runbooks/scripts/test/test_s3SslOnlyBucketPolicy.py +++ b/source/remediation_runbooks/scripts/test/test_s3SslOnlyBucketPolicy.py @@ -1,24 +1,18 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import json + import boto3 import botocore.session -from botocore.stub import Stubber -from botocore.config import Config -import pytest -from pytest_mock import mocker - import SetSSLBucketPolicy as remediation +from botocore.config import Config +from botocore.stub import Stubber my_session = boto3.session.Session() my_region = my_session.region_name -BOTO_CONFIG = Config( - retries ={ - 'mode': 'standard' - }, - region_name=my_region -) +BOTO_CONFIG = Config(retries={"mode": "standard"}, region_name=my_region) + def existing_policy(): return { @@ -28,122 +22,103 @@ def existing_policy(): { "Sid": "S3ReplicationPolicyStmt1", "Effect": "Allow", - "Principal": { - "AWS": "arn:aws:iam::111122223333:root" - }, + "Principal": {"AWS": "arn:aws:iam::111122223333:root"}, "Action": [ "s3:GetBucketVersioning", "s3:PutBucketVersioning", "s3:ReplicateObject", - "s3:ReplicateDelete" + "s3:ReplicateDelete", ], - "Resource": [ - "arn:aws:s3:::abucket", - "arn:aws:s3:::abucket/*" - ] + "Resource": ["arn:aws:s3:::abucket", "arn:aws:s3:::abucket/*"], } - ] + ], } + def policy_to_add(): return { "Sid": "AllowSSLRequestsOnly", "Action": "s3:*", "Effect": "Deny", - "Resource": [ - "arn:aws:s3:::abucket", - "arn:aws:s3:::abucket/*" - ], - "Condition": { - "Bool": { - "aws:SecureTransport": "false" - } - }, - "Principal": "*" + "Resource": ["arn:aws:s3:::abucket", "arn:aws:s3:::abucket/*"], + "Condition": {"Bool": {"aws:SecureTransport": "false"}}, + "Principal": "*", } + def new_policy_json(): return { "Id": "BucketPolicy", "Version": "2012-10-17", - "Statement": [ - policy_to_add() - ] + "Statement": [policy_to_add()], } + def response_metadata(): return { - 'ResponseMetadata': { - 'RequestId': 'A6NCY16443JH271V', - 'HostId': 'vmM0qqMatvgqF2uRvfI79NWUbKaEZHHk49er2WIptAvH420Euq3Ac+cg+CXUEl9kFe3x49Cl/+I=', - 'HTTPStatusCode': 204, - 'HTTPHeaders': { - 'x-amz-id-2': 'vmM0qqMatvgqF2uRvfI79NWUbKaEZHHk49er2WIptAvH420Euq3Ac+cg+CXUEl9kFe3x49Cl/+I=', - 
'x-amz-request-id': 'A6NCY16443JH271V', - 'date': 'Wed, 20 Oct 2021 17:40:32 GMT', - 'server': 'AmazonS3' + "ResponseMetadata": { + "RequestId": "A6NCY16443JH271V", + "HostId": "vmM0qqMatvgqF2uRvfI79NWUbKaEZHHk49er2WIptAvH420Euq3Ac+cg+CXUEl9kFe3x49Cl/+I=", + "HTTPStatusCode": 204, + "HTTPHeaders": { + "x-amz-id-2": "vmM0qqMatvgqF2uRvfI79NWUbKaEZHHk49er2WIptAvH420Euq3Ac+cg+CXUEl9kFe3x49Cl/+I=", + "x-amz-request-id": "A6NCY16443JH271V", + "date": "Wed, 20 Oct 2021 17:40:32 GMT", + "server": "AmazonS3", }, - 'RetryAttempts': - 0 + "RetryAttempts": 0, } } + def event(): - return { - 'bucket': 'abucket', - 'accountid': '111111111111', - 'partition': 'aws' - } + return {"bucket": "abucket", "accountid": "111111111111", "partition": "aws"} + def test_new_policy(mocker): - s3_client = botocore.session.get_session().create_client('s3', config=BOTO_CONFIG) + s3_client = botocore.session.get_session().create_client("s3", config=BOTO_CONFIG) s3_stubber = Stubber(s3_client) s3_stubber.add_client_error( - 'get_bucket_policy', - service_error_code='NoSuchBucketPolicy', - expected_params={ - 'Bucket': 'abucket', - 'ExpectedBucketOwner': '111111111111' - } + "get_bucket_policy", + service_error_code="NoSuchBucketPolicy", + expected_params={"Bucket": "abucket", "ExpectedBucketOwner": "111111111111"}, ) s3_stubber.add_response( - 'put_bucket_policy', + "put_bucket_policy", response_metadata(), expected_params={ - 'Bucket': 'abucket', - 'Policy': json.dumps(new_policy_json(), indent=4), - 'ExpectedBucketOwner': '111111111111' - } + "Bucket": "abucket", + "Policy": json.dumps(new_policy_json(), indent=4), + "ExpectedBucketOwner": "111111111111", + }, ) s3_stubber.activate() - mocker.patch('SetSSLBucketPolicy.connect_to_s3', return_value=s3_client) - assert remediation.add_ssl_bucket_policy(event(), {}) == None + mocker.patch("SetSSLBucketPolicy.connect_to_s3", return_value=s3_client) + assert remediation.add_ssl_bucket_policy(event(), {}) is None s3_stubber.deactivate() + def 
test_add_to_policy(mocker): - s3_client = botocore.session.get_session().create_client('s3', config=BOTO_CONFIG) + s3_client = botocore.session.get_session().create_client("s3", config=BOTO_CONFIG) s3_stubber = Stubber(s3_client) s3_stubber.add_response( - 'get_bucket_policy', - { 'Policy': json.dumps(existing_policy()) }, - expected_params={ - 'Bucket': 'abucket', - 'ExpectedBucketOwner': '111111111111' - } + "get_bucket_policy", + {"Policy": json.dumps(existing_policy())}, + expected_params={"Bucket": "abucket", "ExpectedBucketOwner": "111111111111"}, ) new_policy = existing_policy() - new_policy['Statement'].append(policy_to_add()) + new_policy["Statement"].append(policy_to_add()) print(new_policy) s3_stubber.add_response( - 'put_bucket_policy', + "put_bucket_policy", {}, expected_params={ - 'Bucket': 'abucket', - 'Policy': json.dumps(new_policy, indent=4), - 'ExpectedBucketOwner': '111111111111' - } + "Bucket": "abucket", + "Policy": json.dumps(new_policy, indent=4), + "ExpectedBucketOwner": "111111111111", + }, ) s3_stubber.activate() - mocker.patch('SetSSLBucketPolicy.connect_to_s3', return_value=s3_client) - assert remediation.add_ssl_bucket_policy(event(), {}) == None + mocker.patch("SetSSLBucketPolicy.connect_to_s3", return_value=s3_client) + assert remediation.add_ssl_bucket_policy(event(), {}) is None s3_stubber.deactivate() diff --git a/source/solution_deploy/bin/solution_deploy.ts b/source/solution_deploy/bin/solution_deploy.ts index 9cf2d839..98806092 100644 --- a/source/solution_deploy/bin/solution_deploy.ts +++ b/source/solution_deploy/bin/solution_deploy.ts @@ -15,7 +15,7 @@ const SOLUTION_NAME = process.env['SOLUTION_NAME'] || 'unknown'; const SOLUTION_VERSION = process.env['DIST_VERSION'] || '%%VERSION%%'; const SOLUTION_TMN = process.env['SOLUTION_TRADEMARKEDNAME'] || 'unknown'; const SOLUTION_BUCKET = process.env['DIST_OUTPUT_BUCKET'] || 'unknown'; -const LAMBDA_RUNTIME_PYTHON = lambda.Runtime.PYTHON_3_9; +const LAMBDA_RUNTIME_PYTHON = 
lambda.Runtime.PYTHON_3_11; const app = new cdk.App(); cdk.Aspects.of(app).add(new cdk_nag.AwsSolutionsChecks({ verbose: true })); diff --git a/source/solution_deploy/cdk.json b/source/solution_deploy/cdk.json index 2a4e21c4..b6038fc2 100644 --- a/source/solution_deploy/cdk.json +++ b/source/solution_deploy/cdk.json @@ -2,6 +2,7 @@ "app": "npx ts-node bin/solution_deploy.ts", "versionReporting": false, "context": { - "aws-cdk:enableDiffNoFail": "true" + "aws-cdk:enableDiffNoFail": "true", + "@aws-cdk/aws-iam:standardizedServicePrincipals": true } } diff --git a/source/solution_deploy/source/action_target_provider.py b/source/solution_deploy/source/action_target_provider.py index c725b174..7ad0151a 100644 --- a/source/solution_deploy/source/action_target_provider.py +++ b/source/solution_deploy/source/action_target_provider.py @@ -13,13 +13,14 @@ # "ResponseURL": "https://bogus" # } -import os import json +import os + import boto3 +import cfnresponse from botocore.config import Config from botocore.exceptions import ClientError -from logger import Logger -import cfnresponse +from layer.logger import Logger # initialize logger LOG_LEVEL = os.getenv("log_level", "info") diff --git a/source/solution_deploy/source/cfnresponse.py b/source/solution_deploy/source/cfnresponse.py index 0aacadbe..288da22e 100644 --- a/source/solution_deploy/source/cfnresponse.py +++ b/source/solution_deploy/source/cfnresponse.py @@ -3,6 +3,7 @@ """Send custom resource status to CloudFormation""" import json + import urllib3 SUCCESS = "SUCCESS" diff --git a/source/solution_deploy/source/deployment_metrics_custom_resource.py b/source/solution_deploy/source/deployment_metrics_custom_resource.py new file mode 100644 index 00000000..69e2885d --- /dev/null +++ b/source/solution_deploy/source/deployment_metrics_custom_resource.py @@ -0,0 +1,52 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Custom resource provider that handles deployment actions""" + +import json +from logging import basicConfig, getLevelName, getLogger +from os import getenv + +import cfnresponse +from layer.metrics import Metrics + +basicConfig( + level=getLevelName(getenv("LOG_LEVEL", "INFO")) +) # NOSONAR This configures logging based on the environment variable that is set. +logger = getLogger(__name__) + + +def lambda_handler(event, context): + """Handle the Lambda request for a deployment action""" + response_data: dict[str, str] = {} + logger.debug(f"received event: {json.dumps(event)}") + + try: + properties = event.get("ResourceProperties", {}) + logger.info(json.dumps(properties)) + + if event.get("ResourceType", {}) == "Custom::DeploymentMetrics": + send_deployment_metrics(properties, event.get("RequestType", {})) + + cfnresponse.send(event, context, cfnresponse.SUCCESS, response_data) + except Exception as exc: + logger.exception(exc) + cfnresponse.send( + event, + context, + cfnresponse.FAILED, + response_data, + reason=str(exc), + ) + + +def send_deployment_metrics(properties, request_type): + """Send deployment metrics""" + metrics_obj = Metrics() + metrics_data = { + "Event": f"Solution{request_type}", + "CloudWatchDashboardEnabled": properties.get( + "CloudWatchMetricsDashboardEnabled", {} + ), + } + + metrics_obj.send_metrics(metrics_data) diff --git a/source/solution_deploy/source/test/__init__.py b/source/solution_deploy/source/test/__init__.py index 10ee6ec4..3866a6c2 100644 --- a/source/solution_deploy/source/test/__init__.py +++ b/source/solution_deploy/source/test/__init__.py @@ -1,5 +1,6 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import sys import os -sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', '..', 'LambdaLayers')) +import sys + +sys.path.append(os.path.join(os.path.dirname(__file__), "..", "..", "..", "layer")) diff --git a/source/solution_deploy/source/test/conftest.py b/source/solution_deploy/source/test/conftest.py index 3ea5b014..08432a5a 100644 --- a/source/solution_deploy/source/test/conftest.py +++ b/source/solution_deploy/source/test/conftest.py @@ -1,8 +1,10 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 import os + import pytest + @pytest.fixture(scope="module", autouse=True) def aws_credentials(): os.environ["AWS_ACCESS_KEY_ID"] = "testing" diff --git a/source/solution_deploy/source/test/test_action_target_provider.py b/source/solution_deploy/source/test/test_action_target_provider.py index 4d5b6f6f..13276d32 100644 --- a/source/solution_deploy/source/test/test_action_target_provider.py +++ b/source/solution_deploy/source/test/test_action_target_provider.py @@ -5,19 +5,12 @@ # TODO: test that ID over 20 characters is rejected import os -import pytest -from pytest_mock import mocker -import boto3 -import botocore.session -from botocore.stub import Stubber, ANY -from botocore.exceptions import ClientError import random -from action_target_provider import ( - lambda_handler, - CustomAction, - get_securityhub_client, -) -from botocore.config import Config + +import boto3 +import pytest +from action_target_provider import CustomAction, get_securityhub_client, lambda_handler +from botocore.stub import ANY, Stubber os.environ["AWS_REGION"] = "us-east-1" os.environ["AWS_PARTITION"] = "aws" @@ -124,7 +117,7 @@ def test_create_already_exists(mocker): "Id": "ASRRemediationTest", }, ) - assert customAction.create() == None + assert customAction.create() is None sechub_stub.assert_no_pending_responses() sechub_stub.deactivate() diff --git 
a/source/solution_deploy/source/test/test_cfnresponse.py b/source/solution_deploy/source/test/test_cfnresponse.py index 39c305b1..2ed32e33 100644 --- a/source/solution_deploy/source/test/test_cfnresponse.py +++ b/source/solution_deploy/source/test/test_cfnresponse.py @@ -2,12 +2,12 @@ # SPDX-License-Identifier: Apache-2.0 """Test cfnresponse""" -import os import json +import os from unittest.mock import ANY -import pytest -from pytest_mock import mocker + import cfnresponse +import pytest os.environ["AWS_REGION"] = "us-east-1" os.environ["AWS_PARTITION"] = "aws" @@ -67,7 +67,7 @@ def body_correct( def test_send(urllib_mock, event, context): status = cfnresponse.SUCCESS - response_data = {} + response_data: dict[str, str] = {} cfnresponse.send(event, context, status, response_data) urllib_mock.request.assert_called_once_with( "PUT", event["ResponseURL"], body=ANY, headers=ANY diff --git a/source/solution_deploy/source/test/test_deployment_custom_resource.py b/source/solution_deploy/source/test/test_deployment_custom_resource.py new file mode 100644 index 00000000..f2273bbc --- /dev/null +++ b/source/solution_deploy/source/test/test_deployment_custom_resource.py @@ -0,0 +1,29 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Test the custom resource provider for deployment actions""" + +from unittest.mock import ANY, patch + +from cfnresponse import SUCCESS +from deployment_metrics_custom_resource import lambda_handler + +metrics_data = {"Event": "SolutionCreate", "CloudWatchDashboardEnabled": "yes"} + + +def get_event(resource_type, request_type, cw_metrics_enabled): + return { + "ResourceType": resource_type, + "RequestType": request_type, + "ResourceProperties": { + "CloudWatchMetricsDashboardEnabled": cw_metrics_enabled, + }, + } + + +@patch("cfnresponse.send") +@patch("layer.metrics.Metrics.send_metrics") +def test_send_metrics(mock_send_metrics, mock_cfnresponse): + event = get_event("Custom::DeploymentMetrics", "Create", "yes") + lambda_handler(event, {}) + mock_send_metrics.assert_called_once_with(metrics_data) + mock_cfnresponse.assert_called_once_with(event, {}, SUCCESS, ANY) diff --git a/source/solution_deploy/source/test/test_wait_provider.py b/source/solution_deploy/source/test/test_wait_provider.py index 8b95b452..b9a931f9 100644 --- a/source/solution_deploy/source/test/test_wait_provider.py +++ b/source/solution_deploy/source/test/test_wait_provider.py @@ -3,8 +3,9 @@ """Test the custom resource provider for arbitrary wait""" from unittest.mock import ANY, patch -from wait_provider import lambda_handler + from cfnresponse import SUCCESS +from wait_provider import lambda_handler def get_event(create, update, delete, request_type): diff --git a/source/solution_deploy/source/wait_provider.py b/source/solution_deploy/source/wait_provider.py index 9ddd7587..85699a7b 100644 --- a/source/solution_deploy/source/wait_provider.py +++ b/source/solution_deploy/source/wait_provider.py @@ -2,13 +2,16 @@ # SPDX-License-Identifier: Apache-2.0 """Custom resource provider that waits for a specified time, then returns success""" -from os import getenv import json from logging import basicConfig, getLevelName, getLogger +from os import getenv from 
time import sleep + import cfnresponse -basicConfig(level=getLevelName(getenv("LOG_LEVEL", "INFO"))) #NOSONAR This configures logging based on the environment variable that is set. +basicConfig( + level=getLevelName(getenv("LOG_LEVEL", "INFO")) +) # NOSONAR This configures logging based on the environment variable that is set. logger = getLogger(__name__) @@ -16,14 +19,14 @@ class InvalidRequest(Exception): """Invalid wait request""" -def wait_seconds(wait: float): +def wait_seconds(wait: float) -> None: """Wait for `wait` seconds""" sleep(wait) def lambda_handler(event, context): """Handle the Lambda request for a wait""" - response_data = {} + response_data: dict[str, str] = {} try: properties = event.get("ResourceProperties", {}) diff --git a/source/test/__snapshots__/orchestrator.test.ts.snap b/source/test/__snapshots__/orchestrator.test.ts.snap index 706181c3..64fb87c0 100644 --- a/source/test/__snapshots__/orchestrator.test.ts.snap +++ b/source/test/__snapshots__/orchestrator.test.ts.snap @@ -249,24 +249,42 @@ exports[`test App Orchestrator Construct 1`] = ` "kms:GenerateDataKey", ], "Effect": "Allow", - "Resource": { - "Fn::Join": [ - "", - [ - "arn:", - { - "Ref": "AWS::Partition", - }, - ":kms:", - { - "Ref": "AWS::Region", - }, - ":", - { - "Ref": "AWS::AccountId", - }, - ":alias/bbb-SHARR-Key", + "Resource": [ + { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition", + }, + ":kms:", + { + "Ref": "AWS::Region", + }, + ":", + { + "Ref": "AWS::AccountId", + }, + ":alias/bbb-SHARR-Key", + ], + ], + }, + { + "Fn::GetAtt": [ + "SHARRKeyC551FE02", + "Value", ], + }, + ], + }, + { + "Action": "sqs:SendMessage", + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "SchedulingQueueB533E3CD", + "Arn", ], }, }, @@ -320,7 +338,7 @@ exports[`test App Orchestrator Construct 1`] = ` { "Ref": "AWS::Partition", }, - ":states:::lambda:invoke","Parameters":{"FunctionName":"arn:aws:lambda:us-east-1:111122223333:function/foobar","Payload.$":"$"}},"Automation 
Document is not Active":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('Automation Document ({}) is not active ({}) in the member account({}).', $.AutomationDocId, $.AutomationDocument.DocState, $.Finding.AwsAccountId)","State.$":"States.Format('REMEDIATIONNOTACTIVE')","updateSecHub":"yes"},"EventType.$":"$.EventType","Finding.$":"$.Finding","AccountId.$":"$.AutomationDocument.AccountId","AutomationDocId.$":"$.AutomationDocument.AutomationDocId","RemediationRole.$":"$.AutomationDocument.RemediationRole","ControlId.$":"$.AutomationDocument.ControlId","SecurityStandard.$":"$.AutomationDocument.SecurityStandard","SecurityStandardVersion.$":"$.AutomationDocument.SecurityStandardVersion"},"Next":"notify"},"Automation Doc Active?":{"Type":"Choice","Choices":[{"Variable":"$.AutomationDocument.DocState","StringEquals":"ACTIVE","Next":"Execute Remediation"},{"Variable":"$.AutomationDocument.DocState","StringEquals":"NOTACTIVE","Next":"Automation Document is not Active"},{"Variable":"$.AutomationDocument.DocState","StringEquals":"NOTENABLED","Next":"Security Standard is not enabled"},{"Variable":"$.AutomationDocument.DocState","StringEquals":"NOTFOUND","Next":"No Remediation for Control"}],"Default":"check_ssm_doc_state Error"},"Get Automation Document State":{"Next":"Automation Doc Active?","Retry":[{"ErrorEquals":["Lambda.ServiceException","Lambda.AWSLambdaException","Lambda.SdkClientException"],"IntervalSeconds":2,"MaxAttempts":6,"BackoffRate":2}],"Catch":[{"ErrorEquals":["States.ALL"],"Next":"Orchestrator Failed"}],"Type":"Task","Comment":"Get the status of the remediation automation document in the target 
account","TimeoutSeconds":60,"ResultPath":"$.AutomationDocument","ResultSelector":{"DocState.$":"$.Payload.status","Message.$":"$.Payload.message","SecurityStandard.$":"$.Payload.securitystandard","SecurityStandardVersion.$":"$.Payload.securitystandardversion","SecurityStandardSupported.$":"$.Payload.standardsupported","ControlId.$":"$.Payload.controlid","AccountId.$":"$.Payload.accountid","RemediationRole.$":"$.Payload.remediationrole","AutomationDocId.$":"$.Payload.automationdocid","ResourceRegion.$":"$.Payload.resourceregion"},"Resource":"arn:", + ":states:::lambda:invoke","Parameters":{"FunctionName":"arn:aws:lambda:us-east-1:111122223333:function/foobar","Payload.$":"$"}},"Automation Document is not Active":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('Automation Document ({}) is not active ({}) in the member account({}).', $.AutomationDocId, $.AutomationDocument.DocState, $.Finding.AwsAccountId)","State.$":"States.Format('REMEDIATIONNOTACTIVE')","updateSecHub":"yes"},"EventType.$":"$.EventType","Finding.$":"$.Finding","AccountId.$":"$.AutomationDocument.AccountId","AutomationDocId.$":"$.AutomationDocument.AutomationDocId","RemediationRole.$":"$.AutomationDocument.RemediationRole","ControlId.$":"$.AutomationDocument.ControlId","SecurityStandard.$":"$.AutomationDocument.SecurityStandard","SecurityStandardVersion.$":"$.AutomationDocument.SecurityStandardVersion"},"Next":"notify"},"Automation Doc Active?":{"Type":"Choice","Choices":[{"Variable":"$.AutomationDocument.DocState","StringEquals":"ACTIVE","Next":"Send Task Token"},{"Variable":"$.AutomationDocument.DocState","StringEquals":"NOTACTIVE","Next":"Automation Document is not Active"},{"Variable":"$.AutomationDocument.DocState","StringEquals":"NOTENABLED","Next":"Security Standard is not enabled"},{"Variable":"$.AutomationDocument.DocState","StringEquals":"NOTFOUND","Next":"No Remediation for Control"}],"Default":"check_ssm_doc_state Error"},"Get Automation Document 
State":{"Next":"Automation Doc Active?","Retry":[{"ErrorEquals":["Lambda.ServiceException","Lambda.AWSLambdaException","Lambda.SdkClientException"],"IntervalSeconds":2,"MaxAttempts":6,"BackoffRate":2}],"Catch":[{"ErrorEquals":["States.ALL"],"Next":"Orchestrator Failed"}],"Type":"Task","Comment":"Get the status of the remediation automation document in the target account","TimeoutSeconds":60,"ResultPath":"$.AutomationDocument","ResultSelector":{"DocState.$":"$.Payload.status","Message.$":"$.Payload.message","SecurityStandard.$":"$.Payload.securitystandard","SecurityStandardVersion.$":"$.Payload.securitystandardversion","SecurityStandardSupported.$":"$.Payload.standardsupported","ControlId.$":"$.Payload.controlid","AccountId.$":"$.Payload.accountid","RemediationRole.$":"$.Payload.remediationrole","AutomationDocId.$":"$.Payload.automationdocid","ResourceRegion.$":"$.Payload.resourceregion"},"Resource":"arn:", { "Ref": "AWS::Partition", }, @@ -328,7 +346,15 @@ exports[`test App Orchestrator Construct 1`] = ` { "Ref": "AWS::Partition", }, - ":states:::lambda:invoke","Parameters":{"FunctionName":"arn:aws:lambda:us-east-1:111122223333:function/foobar","Payload.$":"$"}},"Orchestrator Failed":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('Orchestrator failed: {}', $.Error)","State.$":"States.Format('LAMBDAERROR')","Details.$":"States.Format('Cause: {}', $.Cause)"},"Payload.$":"$"},"Next":"notify"},"Execute Remediation":{"Next":"Remediation Queued","Retry":[{"ErrorEquals":["Lambda.ServiceException","Lambda.AWSLambdaException","Lambda.SdkClientException"],"IntervalSeconds":2,"MaxAttempts":6,"BackoffRate":2}],"Catch":[{"ErrorEquals":["States.ALL"],"Next":"Orchestrator Failed"}],"Type":"Task","Comment":"Execute the SSM Automation Document in the target 
account","TimeoutSeconds":300,"HeartbeatSeconds":60,"ResultPath":"$.SSMExecution","ResultSelector":{"ExecState.$":"$.Payload.status","Message.$":"$.Payload.message","ExecId.$":"$.Payload.executionid","Account.$":"$.Payload.executionaccount","Region.$":"$.Payload.executionregion"},"Resource":"arn:", + ":states:::lambda:invoke","Parameters":{"FunctionName":"arn:aws:lambda:us-east-1:111122223333:function/foobar","Payload.$":"$"}},"Orchestrator Failed":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('Orchestrator failed: {}', $.Error)","State.$":"States.Format('LAMBDAERROR')","Details.$":"States.Format('Cause: {}', $.Cause)"},"Payload.$":"$"},"Next":"notify"},"Send Task Token":{"Next":"Remediation Wait","Catch":[{"ErrorEquals":["States.ALL"],"Next":"Orchestrator Failed"}],"Type":"Task","Comment":"Send Task Token to SQS Queue for Remediation Scheduling","Resource":"arn:", + { + "Ref": "AWS::Partition", + }, + ":states:::sqs:sendMessage.waitForTaskToken","Parameters":{"QueueUrl":"", + { + "Ref": "SchedulingQueueB533E3CD", + }, + "","MessageBody":{"RemediationDetails.$":"$","TaskToken.$":"$$.Task.Token","AccountId.$":"$.AutomationDocument.AccountId","ResourceRegion.$":"$.AutomationDocument.ResourceRegion","executionId.$":"$$.Execution.Id"}}},"Remediation Wait":{"Type":"Wait","Comment":"Waiting for remediation","TimestampPath":"$.PlannedTimestamp","Next":"Execute Remediation"},"Execute Remediation":{"Next":"Remediation Queued","Retry":[{"ErrorEquals":["Lambda.ServiceException","Lambda.AWSLambdaException","Lambda.SdkClientException"],"IntervalSeconds":2,"MaxAttempts":6,"BackoffRate":2}],"Catch":[{"ErrorEquals":["States.ALL"],"Next":"Orchestrator Failed"}],"Type":"Task","Comment":"Execute the SSM Automation Document in the target 
account","TimeoutSeconds":300,"HeartbeatSeconds":60,"ResultPath":"$.SSMExecution","ResultSelector":{"ExecState.$":"$.Payload.status","Message.$":"$.Payload.message","ExecId.$":"$.Payload.executionid","Account.$":"$.Payload.executionaccount","Region.$":"$.Payload.executionregion"},"Resource":"arn:", { "Ref": "AWS::Partition", }, @@ -340,7 +366,7 @@ exports[`test App Orchestrator Construct 1`] = ` { "Ref": "AWS::Partition", }, - ":states:::lambda:invoke","Parameters":{"FunctionName":"arn:aws:lambda:us-east-1:111122223333:function/foobar","Payload.$":"$"}},"Wait for Remediation":{"Type":"Wait","Seconds":15,"Next":"execMonitor"},"Remediation completed?":{"Type":"Choice","Choices":[{"Variable":"$.Remediation.RemediationState","StringEquals":"Failed","Next":"Remediation Failed"},{"Variable":"$.Remediation.ExecState","StringEquals":"Success","Next":"Remediation Succeeded"},{"Variable":"$.Remediation.ExecState","StringEquals":"TimedOut","Next":"Remediation Failed"},{"Variable":"$.Remediation.ExecState","StringEquals":"Cancelling","Next":"Remediation Failed"},{"Variable":"$.Remediation.ExecState","StringEquals":"Cancelled","Next":"Remediation Failed"},{"Variable":"$.Remediation.ExecState","StringEquals":"Failed","Next":"Remediation Failed"}],"Default":"Wait for Remediation"},"Remediation Failed":{"Type":"Pass","Comment":"Set parameters for notification","Parameters":{"EventType.$":"$.EventType","Finding.$":"$.Finding","SSMExecution.$":"$.SSMExecution","AutomationDocument.$":"$.AutomationDocument","Notification":{"Message.$":"States.Format('Remediation failed for {} control {} in account {}: {}', $.AutomationDocument.SecurityStandard, $.AutomationDocument.ControlId, $.AutomationDocument.AccountId, $.Remediation.Message)","State.$":"$.Remediation.ExecState","Details.$":"$.Remediation.LogData","ExecId.$":"$.Remediation.ExecId","AffectedObject.$":"$.Remediation.AffectedObject"}},"Next":"notify"},"Remediation Succeeded":{"Type":"Pass","Comment":"Set parameters for 
notification","Parameters":{"EventType.$":"$.EventType","Finding.$":"$.Finding","AccountId.$":"$.AutomationDocument.AccountId","AutomationDocId.$":"$.AutomationDocument.AutomationDocId","RemediationRole.$":"$.AutomationDocument.RemediationRole","ControlId.$":"$.AutomationDocument.ControlId","SecurityStandard.$":"$.AutomationDocument.SecurityStandard","SecurityStandardVersion.$":"$.AutomationDocument.SecurityStandardVersion","Notification":{"Message.$":"States.Format('Remediation succeeded for {} control {} in account {}: {}', $.AutomationDocument.SecurityStandard, $.AutomationDocument.ControlId, $.AutomationDocument.AccountId, $.Remediation.Message)","State.$":"States.Format('SUCCESS')","Details.$":"$.Remediation.LogData","ExecId.$":"$.Remediation.ExecId","AffectedObject.$":"$.Remediation.AffectedObject"}},"Next":"notify"},"check_ssm_doc_state Error":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('check_ssm_doc_state returned an error: {}', $.AutomationDocument.Message)","State.$":"States.Format('LAMBDAERROR')"},"EventType.$":"$.EventType","Finding.$":"$.Finding"},"Next":"notify"},"Security Standard is not enabled":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('Security Standard ({}) v{} is not enabled.', $.AutomationDocument.SecurityStandard, $.AutomationDocument.SecurityStandardVersion)","State.$":"States.Format('STANDARDNOTENABLED')","updateSecHub":"yes"},"EventType.$":"$.EventType","Finding.$":"$.Finding","AccountId.$":"$.AutomationDocument.AccountId","AutomationDocId.$":"$.AutomationDocument.AutomationDocId","RemediationRole.$":"$.AutomationDocument.RemediationRole","ControlId.$":"$.AutomationDocument.ControlId","SecurityStandard.$":"$.AutomationDocument.SecurityStandard","SecurityStandardVersion.$":"$.AutomationDocument.SecurityStandardVersion"},"Next":"notify"},"No Remediation for Control":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('Security Standard {} v{} control {} has no 
automated remediation.', $.AutomationDocument.SecurityStandard, $.AutomationDocument.SecurityStandardVersion, $.AutomationDocument.ControlId)","State.$":"States.Format('NOREMEDIATION')","updateSecHub":"yes"},"EventType.$":"$.EventType","Finding.$":"$.Finding","AccountId.$":"$.AutomationDocument.AccountId","AutomationDocId.$":"$.AutomationDocument.AutomationDocId","RemediationRole.$":"$.AutomationDocument.RemediationRole","ControlId.$":"$.AutomationDocument.ControlId","SecurityStandard.$":"$.AutomationDocument.SecurityStandard","SecurityStandardVersion.$":"$.AutomationDocument.SecurityStandardVersion"},"Next":"notify"}}},"ItemsPath":"$.Findings"},"EOJ":{"Type":"Pass","Comment":"END-OF-JOB","End":true}},"TimeoutSeconds":900}", + ":states:::lambda:invoke","Parameters":{"FunctionName":"arn:aws:lambda:us-east-1:111122223333:function/foobar","Payload.$":"$"}},"Wait for Remediation":{"Type":"Wait","Seconds":15,"Next":"execMonitor"},"Remediation completed?":{"Type":"Choice","Choices":[{"Variable":"$.Remediation.RemediationState","StringEquals":"Failed","Next":"Remediation Failed"},{"Variable":"$.Remediation.ExecState","StringEquals":"Success","Next":"Remediation Succeeded"},{"Variable":"$.Remediation.ExecState","StringEquals":"TimedOut","Next":"Remediation Failed"},{"Variable":"$.Remediation.ExecState","StringEquals":"Cancelling","Next":"Remediation Failed"},{"Variable":"$.Remediation.ExecState","StringEquals":"Cancelled","Next":"Remediation Failed"},{"Variable":"$.Remediation.ExecState","StringEquals":"Failed","Next":"Remediation Failed"}],"Default":"Wait for Remediation"},"Remediation Failed":{"Type":"Pass","Comment":"Set parameters for notification","Parameters":{"EventType.$":"$.EventType","Finding.$":"$.Finding","SSMExecution.$":"$.SSMExecution","AutomationDocument.$":"$.AutomationDocument","Notification":{"Message.$":"States.Format('Remediation failed for {} control {} in account {}: {}', $.AutomationDocument.SecurityStandard, $.AutomationDocument.ControlId, 
$.AutomationDocument.AccountId, $.Remediation.Message)","State.$":"$.Remediation.ExecState","Details.$":"$.Remediation.LogData","ExecId.$":"$.Remediation.ExecId","AffectedObject.$":"$.Remediation.AffectedObject"}},"Next":"notify"},"Remediation Succeeded":{"Type":"Pass","Comment":"Set parameters for notification","Parameters":{"EventType.$":"$.EventType","Finding.$":"$.Finding","AccountId.$":"$.AutomationDocument.AccountId","AutomationDocId.$":"$.AutomationDocument.AutomationDocId","RemediationRole.$":"$.AutomationDocument.RemediationRole","ControlId.$":"$.AutomationDocument.ControlId","SecurityStandard.$":"$.AutomationDocument.SecurityStandard","SecurityStandardVersion.$":"$.AutomationDocument.SecurityStandardVersion","Notification":{"Message.$":"States.Format('Remediation succeeded for {} control {} in account {}: {}', $.AutomationDocument.SecurityStandard, $.AutomationDocument.ControlId, $.AutomationDocument.AccountId, $.Remediation.Message)","State.$":"States.Format('SUCCESS')","Details.$":"$.Remediation.LogData","ExecId.$":"$.Remediation.ExecId","AffectedObject.$":"$.Remediation.AffectedObject"}},"Next":"notify"},"check_ssm_doc_state Error":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('check_ssm_doc_state returned an error: {}', $.AutomationDocument.Message)","State.$":"States.Format('LAMBDAERROR')"},"EventType.$":"$.EventType","Finding.$":"$.Finding"},"Next":"notify"},"Security Standard is not enabled":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('Security Standard ({}) v{} is not enabled.', $.AutomationDocument.SecurityStandard, 
$.AutomationDocument.SecurityStandardVersion)","State.$":"States.Format('STANDARDNOTENABLED')","updateSecHub":"yes"},"EventType.$":"$.EventType","Finding.$":"$.Finding","AccountId.$":"$.AutomationDocument.AccountId","AutomationDocId.$":"$.AutomationDocument.AutomationDocId","RemediationRole.$":"$.AutomationDocument.RemediationRole","ControlId.$":"$.AutomationDocument.ControlId","SecurityStandard.$":"$.AutomationDocument.SecurityStandard","SecurityStandardVersion.$":"$.AutomationDocument.SecurityStandardVersion"},"Next":"notify"},"No Remediation for Control":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('Security Standard {} v{} control {} has no automated remediation.', $.AutomationDocument.SecurityStandard, $.AutomationDocument.SecurityStandardVersion, $.AutomationDocument.ControlId)","State.$":"States.Format('NOREMEDIATION')","updateSecHub":"yes"},"EventType.$":"$.EventType","Finding.$":"$.Finding","AccountId.$":"$.AutomationDocument.AccountId","AutomationDocId.$":"$.AutomationDocument.AutomationDocId","RemediationRole.$":"$.AutomationDocument.RemediationRole","ControlId.$":"$.AutomationDocument.ControlId","SecurityStandard.$":"$.AutomationDocument.SecurityStandard","SecurityStandardVersion.$":"$.AutomationDocument.SecurityStandardVersion"},"Next":"notify"}}},"ItemsPath":"$.Findings"},"EOJ":{"Type":"Pass","Comment":"END-OF-JOB","End":true}},"TimeoutSeconds":5400}", ], ], }, @@ -474,6 +500,52 @@ exports[`test App Orchestrator Construct 1`] = ` "Type": "AWS::KMS::Key", "UpdateReplacePolicy": "Retain", }, + "SchedulingQueueB533E3CD": { + "DeletionPolicy": "Delete", + "Properties": { + "KmsMasterKeyId": { + "Fn::GetAtt": [ + "SHARRkeyE6BD0F56", + "Arn", + ], + }, + }, + "Type": "AWS::SQS::Queue", + "UpdateReplacePolicy": "Delete", + }, + "SchedulingQueuePolicy36FAAC29": { + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": "sqs:*", + "Condition": { + "Bool": { + "aws:SecureTransport": "false", + }, + }, + "Effect": 
"Deny", + "Principal": { + "AWS": "*", + }, + "Resource": { + "Fn::GetAtt": [ + "SchedulingQueueB533E3CD", + "Arn", + ], + }, + }, + ], + "Version": "2012-10-17", + }, + "Queues": [ + { + "Ref": "SchedulingQueueB533E3CD", + }, + ], + }, + "Type": "AWS::SQS::QueuePolicy", + }, }, } `; diff --git a/source/test/__snapshots__/orchestrator_logs.test.ts.snap b/source/test/__snapshots__/orchestrator_logs.test.ts.snap index b5475168..fa82dd2d 100644 --- a/source/test/__snapshots__/orchestrator_logs.test.ts.snap +++ b/source/test/__snapshots__/orchestrator_logs.test.ts.snap @@ -82,7 +82,7 @@ exports[`Global Roles Stack 1`] = ` }, "Properties": { "LogGroupName": "TestLogGroup", - "RetentionInDays": 365, + "RetentionInDays": 3653, }, "Type": "AWS::Logs::LogGroup", "UpdateReplacePolicy": "Retain", @@ -95,7 +95,7 @@ exports[`Global Roles Stack 1`] = ` "Ref": "KmsKeyArn", }, "LogGroupName": "TestLogGroup", - "RetentionInDays": 365, + "RetentionInDays": 3653, }, "Type": "AWS::Logs::LogGroup", "UpdateReplacePolicy": "Retain", diff --git a/source/test/__snapshots__/runbook_stack.test.ts.snap b/source/test/__snapshots__/runbook_stack.test.ts.snap index e21ca99b..78a331a7 100644 --- a/source/test/__snapshots__/runbook_stack.test.ts.snap +++ b/source/test/__snapshots__/runbook_stack.test.ts.snap @@ -233,6 +233,139 @@ exports[`Regional Documents 1`] = ` }, }, "Resources": { + "ASRBlockSSMDocumentPublicAccess": { + "DependsOn": [ + "CreateWait9", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-BlockSSMDocumentPublicAccess + +## What does this document do? +This document modifies SSM document permissions to prevent cross-account public access. + +## Input Parameters +* DocumentArn: (Required) SSM Document name to be changed. +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. 
+ +## Output Parameters +* BlockSSMDocumentPublicAccess.Output +", + "mainSteps": [ + { + "action": "aws:executeScript", + "description": "## Remediation +Removes public access to the SSM Document +", + "inputs": { + "Handler": "lambda_handler", + "InputPayload": { + "document_arn": "{{DocumentArn}}", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +from typing import TypedDict + +import boto3 +from botocore.config import Config + + +class EventType(TypedDict): + accountid: str + name: str + + +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + + +def connect_to_ssm(): + return boto3.client("ssm", config=boto_config) + + +def lambda_handler(event: EventType, _): + """ + remediates SSM.4 by disabling public access to SSM documents + On success returns True + On failure returns NoneType + """ + + try: + document_arn = event["document_arn"] + document_name = document_arn.split("/")[1] + document_perimissions = describe_document_permissions(document_name) + if "all" in document_perimissions.get("AccountIds"): + modify_document_permissions(document_name) + else: + exit(f"No change was made to {document_name}") + + verify_document_permissions = describe_document_permissions(document_name) + + if "all" not in verify_document_permissions.get("AccountIds"): + return {"isPublic": "False"} + else: + raise RuntimeError + + except Exception as e: + exit(f"Failed to retrieve the SSM Document permission: {str(e)}") + + +def describe_document_permissions(document_name): + ssm_client = connect_to_ssm() + try: + document_permissions = ssm_client.describe_document_permission( + Name=document_name, PermissionType="Share" + ) + return document_permissions + except Exception as e: + exit(f"Failed to describe SSM Document {document_name}: {str(e)}") + + +def modify_document_permissions(document_name): + ssm_client = connect_to_ssm() + try: + 
ssm_client.modify_document_permission( + Name=document_name, AccountIdsToRemove=["all"], PermissionType="Share" + ) + except Exception as e: + exit(f"Failed to modify SSM Document {document_name}: {str(e)}")", + }, + "name": "BlockSSMDocumentPublicAccess", + "outputs": [ + { + "Name": "Output", + "Selector": "$.Payload.response", + "Type": "StringMap", + }, + ], + "timeoutSeconds": 600, + }, + ], + "outputs": [ + "BlockSSMDocumentPublicAccess.Output", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "DocumentArn": { + "allowedPattern": "^(arn:(?:aws|aws-cn|aws-us-gov):ssm:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:document\\/[A-Za-z0-9][A-Za-z0-9\\-_]{1,254})$", + "description": "(Required) The document ARN.", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-BlockSSMDocumentPublicAccess", + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, "ASRConfigureS3BucketPublicAccessBlock": { "DependsOn": [ "CreateWait4", @@ -594,7 +727,7 @@ This document creates an SNS topic if it does not already exist, then updates th * StackArn: (Required) The ARN of the stack. 
## Security Standards / Controls -* AFSBP v1.0.0: CloudFormation.1 +* AWS FSBP v1.0.0: CloudFormation.1 ", "mainSteps": [ { @@ -612,12 +745,19 @@ This document creates an SNS topic if it does not already exist, then updates th Configure a CloudFormation stack with an SNS topic for notifications, creating the topic if it does not already exist """ +from time import sleep, time +from typing import TYPE_CHECKING -from time import time, sleep import boto3 from botocore.config import Config -boto_config = Config(retries={ 'mode': 'standard' }) +if TYPE_CHECKING: + from mypy_boto3_sns.client import SNSClient +else: + SNSClient = object + +boto_config = Config(retries={"mode": "standard"}) + def lambda_handler(event, _): """ @@ -630,65 +770,70 @@ def lambda_handler(event, _): \`context\` is ignored """ - stack_arn = event['stack_arn'] - topic_name = event['topic_name'] + stack_arn = event["stack_arn"] + topic_name = event["topic_name"] topic_arn = get_or_create_topic(topic_name) configure_notifications(stack_arn, topic_arn) wait_for_update(stack_arn) return assert_stack_configured(stack_arn, topic_arn) -def get_or_create_topic(topic_name: str): + +def get_or_create_topic(topic_name: str) -> str: """Get the SQS topic arn for the given topic name, creating it if it does not already exist""" - sns = boto3.client('sns', config=boto_config) + sns: SNSClient = boto3.client("sns", config=boto_config) response = sns.create_topic(Name=topic_name) - return response['TopicArn'] + return response["TopicArn"] -def configure_notifications(stack_arn: str, topic_arn: str): + +def configure_notifications(stack_arn: str, topic_arn: str) -> None: """Configure the stack with ARN \`stack_arn\` to notify the queue with ARN \`topic_arn\`""" - cloudformation = boto3.resource('cloudformation', config=boto_config) + cloudformation = boto3.resource("cloudformation", config=boto_config) stack = cloudformation.Stack(stack_arn) - kwargs = { - 'UsePreviousTemplate': True, - 'NotificationARNs': 
[topic_arn]} + kwargs = {"UsePreviousTemplate": True, "NotificationARNs": [topic_arn]} if stack.parameters: - kwargs['Parameters'] = [{ - 'ParameterKey': param['ParameterKey'], - 'UsePreviousValue': True - } for param in stack.parameters] + kwargs["Parameters"] = [ + {"ParameterKey": param["ParameterKey"], "UsePreviousValue": True} + for param in stack.parameters + ] if stack.capabilities: - kwargs['Capabilities'] = stack.capabilities + kwargs["Capabilities"] = stack.capabilities stack.update(**kwargs) + class UpdateTimeoutException(Exception): """Timed out waiting for the CloudFormation stack to update""" -def wait_for_update(stack_arn: str): + +def wait_for_update(stack_arn: str) -> None: """Wait for the stack with ARN \`stack_arn\` to be in status \`UPDATE_COMPLETE\`""" wait_interval_seconds = 10 timeout_seconds = 300 start = time() - while get_stack_status(stack_arn) != 'UPDATE_COMPLETE': + while get_stack_status(stack_arn) != "UPDATE_COMPLETE": if time() - start > timeout_seconds: - raise UpdateTimeoutException('Timed out waiting for stack update') + raise UpdateTimeoutException("Timed out waiting for stack update") wait_seconds(wait_interval_seconds) wait_interval_seconds = wait_interval_seconds * 2 + def get_stack_status(stack_arn): """Get the status of the CloudFormation stack with ARN \`stack_arn\`""" - cloudformation = boto3.client('cloudformation', config=boto_config) + cloudformation = boto3.client("cloudformation", config=boto_config) response = cloudformation.describe_stacks(StackName=stack_arn) - return response['Stacks'][0]['StackStatus'] + return response["Stacks"][0]["StackStatus"] + def wait_seconds(seconds): """Wait for \`seconds\` seconds""" sleep(seconds) + def assert_stack_configured(stack_arn, topic_arn): """ Verify that the CloudFormation stack with ARN \`stack_arn\` is configured to update the SQS topic with ARN \`topic_arn\` """ - cloudformation = boto3.resource('cloudformation', config=boto_config) + cloudformation = 
boto3.resource("cloudformation", config=boto_config) stack = cloudformation.Stack(stack_arn) wait_interval_seconds = 10 timeout_seconds = 300 @@ -696,11 +841,13 @@ def assert_stack_configured(stack_arn, topic_arn): while stack.notification_arns != [topic_arn]: if time() - start > timeout_seconds: raise StackConfigurationFailedException( - 'Timed out waiting for stack configuration to take effect') + "Timed out waiting for stack configuration to take effect" + ) wait_seconds(wait_interval_seconds) wait_interval_seconds = wait_interval_seconds * 2 stack.reload() - return { 'NotificationARNs': stack.notification_arns } + return {"NotificationARNs": stack.notification_arns} + class StackConfigurationFailedException(Exception): """An error occurred updating the CloudFormation stack to notify the SQS topic"""", @@ -775,29 +922,48 @@ Creates an S3 bucket for access logging. "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 +from typing import TYPE_CHECKING, TypedDict, cast + import boto3 -from botocore.exceptions import ClientError from botocore.config import Config -from typing import TYPE_CHECKING, Dict +from botocore.exceptions import ClientError if TYPE_CHECKING: - from mypy_boto3_s3 import S3Client from aws_lambda_powertools.utilities.typing import LambdaContext + from mypy_boto3_s3.client import S3Client + from mypy_boto3_s3.literals import BucketLocationConstraintType + from mypy_boto3_s3.type_defs import CreateBucketRequestRequestTypeDef else: S3Client = object LambdaContext = object + BucketLocationConstraintType = object + CreateBucketRequestRequestTypeDef = object def connect_to_s3(boto_config: Config) -> S3Client: - return boto3.client("s3", config=boto_config) + s3: S3Client = boto3.client("s3", config=boto_config) + return s3 + + +class Event(TypedDict): + BucketName: str + AWS_REGION: str + +class Output(TypedDict): + Message: str -def create_logging_bucket(event: 
Dict, _: LambdaContext) -> Dict: + +class Response(TypedDict): + output: Output + + +def create_logging_bucket(event: Event, _: LambdaContext) -> Response: boto_config = Config(retries={"mode": "standard"}) s3 = connect_to_s3(boto_config) try: - kwargs = { + kwargs: CreateBucketRequestRequestTypeDef = { "Bucket": event["BucketName"], "GrantWrite": "uri=http://acs.amazonaws.com/groups/s3/LogDelivery", "GrantReadACP": "uri=http://acs.amazonaws.com/groups/s3/LogDelivery", @@ -805,7 +971,9 @@ def create_logging_bucket(event: Dict, _: LambdaContext) -> Dict: } if event["AWS_REGION"] != "us-east-1": kwargs["CreateBucketConfiguration"] = { - "LocationConstraint": event["AWS_REGION"] + "LocationConstraint": cast( + BucketLocationConstraintType, event["AWS_REGION"] + ) } s3.create_bucket(**kwargs) @@ -884,7 +1052,7 @@ Note: this remediation will create a NEW trail. * KMSKeyArn (from SSM): Arn of the KMS key to be used to encrypt data ## Security Standards / Controls -* AFSBP v1.0.0: CloudTrail.1 +* AWS FSBP v1.0.0: CloudTrail.1 * CIS v1.2.0: 2.1 * PCI: CloudTrail.2 ", @@ -901,24 +1069,38 @@ Note: this remediation will create a NEW trail. "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 +from typing import TYPE_CHECKING, Dict, Literal, TypedDict, cast + import boto3 from botocore.config import Config from botocore.exceptions import ClientError -from typing import TYPE_CHECKING, Dict if TYPE_CHECKING: - from mypy_boto3_s3 import S3Client from aws_lambda_powertools.utilities.typing import LambdaContext + from mypy_boto3_s3.client import S3Client + from mypy_boto3_s3.literals import BucketLocationConstraintType + from mypy_boto3_s3.type_defs import CreateBucketRequestRequestTypeDef else: S3Client = object LambdaContext = object + BucketLocationConstraintType = object + CreateBucketRequestRequestTypeDef = object def connect_to_s3() -> S3Client: - return boto3.client("s3", config=Config(retries={"mode": "standard"})) + s3: S3Client = boto3.client("s3", config=Config(retries={"mode": "standard"})) + return s3 + +class Event(TypedDict): + account: str + region: str + kms_key_arn: str -def create_logging_bucket(event: Dict, _: LambdaContext) -> Dict: + +def create_logging_bucket( + event: Event, _: LambdaContext +) -> Dict[Literal["logging_bucket"], str]: s3 = connect_to_s3() kms_key_arn: str = event["kms_key_arn"] @@ -937,13 +1119,15 @@ def create_logging_bucket(event: Dict, _: LambdaContext) -> Dict: def create_bucket(s3: S3Client, bucket_name: str, aws_region: str) -> str: try: - kwargs = { + kwargs: CreateBucketRequestRequestTypeDef = { "Bucket": bucket_name, "ACL": "private", "ObjectOwnership": "ObjectWriter", } if aws_region != "us-east-1": - kwargs["CreateBucketConfiguration"] = {"LocationConstraint": aws_region} + kwargs["CreateBucketConfiguration"] = { + "LocationConstraint": cast(BucketLocationConstraintType, aws_region) + } s3.create_bucket(**kwargs) return "success" @@ -1158,25 +1342,22 @@ def put_bucket_logging(s3, bucket_name, logging_bucket): "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import json + import boto3 from botocore.config import Config -from botocore.exceptions import ClientError + def connect_to_s3(boto_config): - return boto3.client('s3', config=boto_config) + return boto3.client("s3", config=boto_config) -def create_bucket_policy(event, _): - boto_config = Config( - retries ={ - 'mode': 'standard' - } - ) +def create_bucket_policy(event, _): + boto_config = Config(retries={"mode": "standard"}) s3 = connect_to_s3(boto_config) - cloudtrail_bucket = event['cloudtrail_bucket'] - aws_partition = event['partition'] - aws_account = event['account'] + cloudtrail_bucket = event["cloudtrail_bucket"] + aws_partition = event["partition"] + aws_account = event["account"] try: bucket_policy = { "Version": "2012-10-17", @@ -1184,56 +1365,46 @@ def create_bucket_policy(event, _): { "Sid": "AWSCloudTrailAclCheck20150319", "Effect": "Allow", - "Principal": { - "Service": [ - "cloudtrail.amazonaws.com" - ] - }, + "Principal": {"Service": ["cloudtrail.amazonaws.com"]}, "Action": "s3:GetBucketAcl", - "Resource": "arn:" + aws_partition + ":s3:::" + cloudtrail_bucket + "Resource": "arn:" + aws_partition + ":s3:::" + cloudtrail_bucket, }, { "Sid": "AWSCloudTrailWrite20150319", "Effect": "Allow", - "Principal": { - "Service": [ - "cloudtrail.amazonaws.com" - ] - }, + "Principal": {"Service": ["cloudtrail.amazonaws.com"]}, "Action": "s3:PutObject", - "Resource": "arn:" + aws_partition + ":s3:::" + cloudtrail_bucket + "/AWSLogs/" + aws_account + "/*", + "Resource": "arn:" + + aws_partition + + ":s3:::" + + cloudtrail_bucket + + "/AWSLogs/" + + aws_account + + "/*", "Condition": { - "StringEquals": { - "s3:x-amz-acl": "bucket-owner-full-control" - }, - } + "StringEquals": {"s3:x-amz-acl": "bucket-owner-full-control"}, + }, }, { "Sid": "AllowSSLRequestsOnly", "Effect": "Deny", "Principal": "*", "Action": "s3:*", - "Resource": ["arn:" + aws_partition + ":s3:::" + cloudtrail_bucket ,"arn:" + aws_partition + ":s3:::" + 
cloudtrail_bucket + "/*"], - "Condition": { - "Bool": { - "aws:SecureTransport": "false" - } - } - } - ] + "Resource": [ + "arn:" + aws_partition + ":s3:::" + cloudtrail_bucket, + "arn:" + aws_partition + ":s3:::" + cloudtrail_bucket + "/*", + ], + "Condition": {"Bool": {"aws:SecureTransport": "false"}}, + }, + ], } - s3.put_bucket_policy( - Bucket=cloudtrail_bucket, - Policy=json.dumps(bucket_policy) - ) + s3.put_bucket_policy(Bucket=cloudtrail_bucket, Policy=json.dumps(bucket_policy)) return { - "output": { - "Message": f'Set bucket policy for bucket {cloudtrail_bucket}' - } + "output": {"Message": f"Set bucket policy for bucket {cloudtrail_bucket}"} } except Exception as e: print(e) - exit('PutBucketPolicy failed: ' + str(e))", + exit("PutBucketPolicy failed: " + str(e))", }, "isEnd": false, "name": "CreateCloudTrailBucketPolicy", @@ -1251,39 +1422,31 @@ def create_bucket_policy(event, _): # SPDX-License-Identifier: Apache-2.0 import boto3 from botocore.config import Config -from botocore.exceptions import ClientError + def connect_to_cloudtrail(boto_config): - return boto3.client('cloudtrail', config=boto_config) + return boto3.client("cloudtrail", config=boto_config) -def enable_cloudtrail(event, _): - boto_config = Config( - retries ={ - 'mode': 'standard' - } - ) +def enable_cloudtrail(event, _): + boto_config = Config(retries={"mode": "standard"}) ct = connect_to_cloudtrail(boto_config) try: ct.create_trail( - Name='multi-region-cloud-trail', - S3BucketName=event['cloudtrail_bucket'], + Name="multi-region-cloud-trail", + S3BucketName=event["cloudtrail_bucket"], IncludeGlobalServiceEvents=True, EnableLogFileValidation=True, IsMultiRegionTrail=True, - KmsKeyId=event['kms_key_arn'] - ) - ct.start_logging( - Name='multi-region-cloud-trail' + KmsKeyId=event["kms_key_arn"], ) + ct.start_logging(Name="multi-region-cloud-trail") return { - "output": { - "Message": f'CloudTrail Trail multi-region-cloud-trail created' - } + "output": {"Message": "CloudTrail Trail 
multi-region-cloud-trail created"} } except Exception as e: - exit('Error enabling AWS Config: ' + str(e))", + exit("Error enabling AWS Config: " + str(e))", }, "isEnd": false, "name": "EnableCloudTrail", @@ -1307,15 +1470,17 @@ def enable_cloudtrail(event, _): "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 def process_results(event, _): - print(f'Created encrypted CloudTrail bucket {event["cloudtrail_bucket"]}') - print(f'Created access logging for CloudTrail bucket in bucket {event["logging_bucket"]}') - print('Enabled multi-region AWS CloudTrail') - return { - "response": { - "message": "AWS CloudTrail successfully enabled", - "status": "Success" - } - }", + print(f'Created encrypted CloudTrail bucket {event["cloudtrail_bucket"]}') + print( + f'Created access logging for CloudTrail bucket in bucket {event["logging_bucket"]}' + ) + print("Enabled multi-region AWS CloudTrail") + return { + "response": { + "message": "AWS CloudTrail successfully enabled", + "status": "Success", + } + }", }, "isEnd": true, "name": "Remediation", @@ -1396,12 +1561,20 @@ This step deactivates IAM user access keys that have not been rotated in more th "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import json -from botocore.config import Config +from typing import Dict, Final, List, Literal, TypedDict + import boto3 +from botocore.config import Config BOTO_CONFIG = Config(retries={"mode": "standard"}) -responses = {} + +class Response(TypedDict): + Account: str + RoleName: Literal["aws_incident_support_role"] + + +responses: Dict[Literal["CreateIAMRoleResponse"], List[Response]] = {} responses["CreateIAMRoleResponse"] = [] @@ -1410,11 +1583,15 @@ def connect_to_iam(boto_config): def get_account(boto_config): - return boto3.client('sts', config=boto_config).get_caller_identity()['Account'] + return boto3.client("sts", config=boto_config).get_caller_identity()["Account"] def get_partition(boto_config): - return boto3.client('sts', config=boto_config).get_caller_identity()['Arn'].split(':')[1] + return ( + boto3.client("sts", config=boto_config) + .get_caller_identity()["Arn"] + .split(":")[1] + ) def create_iam_role(_, __): @@ -1432,7 +1609,7 @@ def create_iam_role(_, __): ], } - role_name = "aws_incident_support_role" + role_name: Final = "aws_incident_support_role" iam = connect_to_iam(BOTO_CONFIG) if not does_role_exist(iam, role_name): iam.create_role( @@ -1544,94 +1721,90 @@ Creates a metric filter for a given log group and also creates and alarm for the "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import json + import boto3 from botocore.config import Config from botocore.exceptions import ClientError -boto_config = Config( - retries ={ - 'mode': 'standard' - } -) +boto_config = Config(retries={"mode": "standard"}) + def connect_to_sns(): - return boto3.client('sns', config=boto_config) + return boto3.client("sns", config=boto_config) + def connect_to_ssm(): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) -def create_encrypted_topic(event, _): - kms_key_arn = event['kms_key_arn'] +def create_encrypted_topic(event, _): + kms_key_arn = event["kms_key_arn"] new_topic = False - topic_arn = '' - topic_name = event['topic_name'] + topic_arn = "" + topic_name = event["topic_name"] try: sns = connect_to_sns() topic_arn = sns.create_topic( - Name=topic_name, - Attributes={ - 'KmsMasterKeyId': kms_key_arn.split('key/')[1] - } - )['TopicArn'] + Name=topic_name, Attributes={"KmsMasterKeyId": kms_key_arn.split("key/")[1]} + )["TopicArn"] new_topic = True except ClientError as client_exception: - exception_type = client_exception.response['Error']['Code'] - if exception_type == 'InvalidParameter': - print(f'Topic {topic_name} already exists. This remediation may have been run before.') - print('Ignoring exception - remediation continues.') - topic_arn = sns.create_topic( - Name=topic_name - )['TopicArn'] + exception_type = client_exception.response["Error"]["Code"] + if exception_type == "InvalidParameter": + print( + f"Topic {topic_name} already exists. This remediation may have been run before." 
+ ) + print("Ignoring exception - remediation continues.") + topic_arn = sns.create_topic(Name=topic_name)["TopicArn"] else: - exit(f'ERROR: Unhandled client exception: {client_exception}') + exit(f"ERROR: Unhandled client exception: {client_exception}") except Exception as e: - exit(f'ERROR: could not create SNS Topic {topic_name}: {str(e)}') + exit(f"ERROR: could not create SNS Topic {topic_name}: {str(e)}") if new_topic: try: ssm = connect_to_ssm() ssm.put_parameter( - Name='/Solutions/SO0111/SNS_Topic_CIS3.x', - Description='SNS Topic for AWS Config updates', - Type='String', + Name="/Solutions/SO0111/SNS_Topic_CIS3.x", + Description="SNS Topic for AWS Config updates", + Type="String", Overwrite=True, - Value=topic_arn + Value=topic_arn, ) except Exception as e: - exit(f'ERROR: could not create SNS Topic {topic_name}: {str(e)}') + exit(f"ERROR: could not create SNS Topic {topic_name}: {str(e)}") create_topic_policy(topic_arn) return {"topic_arn": topic_arn} + def create_topic_policy(topic_arn): sns = connect_to_sns() try: topic_policy = { "Id": "Policy_ID", "Statement": [ - { - "Sid": "AWSConfigSNSPolicy", - "Effect": "Allow", - "Principal": { - "Service": "cloudwatch.amazonaws.com" - }, - "Action": "SNS:Publish", - "Resource": topic_arn, - }] + { + "Sid": "AWSConfigSNSPolicy", + "Effect": "Allow", + "Principal": {"Service": "cloudwatch.amazonaws.com"}, + "Action": "SNS:Publish", + "Resource": topic_arn, + } + ], } sns.set_topic_attributes( TopicArn=topic_arn, - AttributeName='Policy', - AttributeValue=json.dumps(topic_policy) + AttributeName="Policy", + AttributeValue=json.dumps(topic_policy), ) except Exception as e: - exit(f'ERROR: Failed to SetTopicAttributes for {topic_arn}: {str(e)}')", + exit(f"ERROR: Failed to SetTopicAttributes for {topic_arn}: {str(e)}")", }, "name": "CreateTopic", "outputs": [ @@ -1661,20 +1834,16 @@ def create_topic_policy(topic_arn): "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. 
All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import boto3 import logging import os + +import boto3 from botocore.config import Config -boto_config = Config( - retries={ - 'max_attempts': 10, - 'mode': 'standard' - } -) +boto_config = Config(retries={"max_attempts": 10, "mode": "standard"}) log = logging.getLogger() -LOG_LEVEL = str(os.getenv('LogLevel', 'INFO')) +LOG_LEVEL = str(os.getenv("LogLevel", "INFO")) log.setLevel(LOG_LEVEL) @@ -1688,7 +1857,14 @@ def get_service_client(service_name): return boto3.client(service_name, config=boto_config) -def put_metric_filter(cw_log_group, filter_name, filter_pattern, metric_name, metric_namespace, metric_value): +def put_metric_filter( + cw_log_group, + filter_name, + filter_pattern, + metric_name, + metric_namespace, + metric_value, +): """ Puts the metric filter on the CloudWatch log group with provided values :param cw_log_group: Name of the CloudWatch log group @@ -1698,9 +1874,19 @@ def put_metric_filter(cw_log_group, filter_name, filter_pattern, metric_name, me :param metric_namespace: Namespace where metric is logged :param metric_value: Value to be logged for the metric """ - logs_client = get_service_client('logs') - log.debug("Putting the metric filter with values: {}".format([ - cw_log_group, filter_name, filter_pattern, metric_name, metric_namespace, metric_value])) + logs_client = get_service_client("logs") + log.debug( + "Putting the metric filter with values: {}".format( + [ + cw_log_group, + filter_name, + filter_pattern, + metric_name, + metric_namespace, + metric_value, + ] + ) + ) try: logs_client.put_metric_filter( logGroupName=cw_log_group, @@ -1708,19 +1894,21 @@ def put_metric_filter(cw_log_group, filter_name, filter_pattern, metric_name, me filterPattern=filter_pattern, metricTransformations=[ { - 'metricName': metric_name, - 'metricNamespace': metric_namespace, - 'metricValue': str(metric_value), - 'unit': 'Count' + "metricName": metric_name, + "metricNamespace": metric_namespace, + 
"metricValue": str(metric_value), + "unit": "Count", } - ] + ], ) except Exception as e: exit("Exception occurred while putting metric filter: " + str(e)) log.debug("Successfully added the metric filter.") -def put_metric_alarm(alarm_name, alarm_desc, alarm_threshold, metric_name, metric_namespace, topic_arn): +def put_metric_alarm( + alarm_name, alarm_desc, alarm_threshold, metric_name, metric_namespace, topic_arn +): """ Puts the metric alarm for the metric name with provided values :param alarm_name: Name for the alarm @@ -1729,30 +1917,29 @@ def put_metric_alarm(alarm_name, alarm_desc, alarm_threshold, metric_name, metri :param metric_name: Name of the metric :param metric_namespace: Namespace where metric is logged """ - cw_client = get_service_client('cloudwatch') - log.debug("Putting the metric alarm with values {}".format( - [alarm_name, alarm_desc, alarm_threshold, metric_name, metric_namespace])) + cw_client = get_service_client("cloudwatch") + log.debug( + "Putting the metric alarm with values {}".format( + [alarm_name, alarm_desc, alarm_threshold, metric_name, metric_namespace] + ) + ) try: cw_client.put_metric_alarm( AlarmName=alarm_name, AlarmDescription=alarm_desc, ActionsEnabled=True, - OKActions=[ - topic_arn - ], - AlarmActions=[ - topic_arn - ], + OKActions=[topic_arn], + AlarmActions=[topic_arn], MetricName=metric_name, Namespace=metric_namespace, - Statistic='Sum', + Statistic="Sum", Period=300, - Unit='Count', + Unit="Count", EvaluationPeriods=12, DatapointsToAlarm=1, Threshold=alarm_threshold, - ComparisonOperator='GreaterThanOrEqualToThreshold', - TreatMissingData='notBreaching' + ComparisonOperator="GreaterThanOrEqualToThreshold", + TreatMissingData="notBreaching", ) except Exception as e: exit("Exception occurred while putting metric alarm: " + str(e)) @@ -1764,23 +1951,37 @@ def verify(event, _): log.debug("====Print Event====") log.debug(event) - filter_name = event['FilterName'] - filter_pattern = event['FilterPattern'] - metric_name = 
event['MetricName'] - metric_namespace = event['MetricNamespace'] - metric_value = event['MetricValue'] - alarm_name = event['AlarmName'] - alarm_desc = event['AlarmDesc'] - alarm_threshold = event['AlarmThreshold'] - cw_log_group = event['LogGroupName'] - topic_arn = event['TopicArn'] - - put_metric_filter(cw_log_group, filter_name, filter_pattern, metric_name, metric_namespace, metric_value) - put_metric_alarm(alarm_name, alarm_desc, alarm_threshold, metric_name, metric_namespace, topic_arn) + filter_name = event["FilterName"] + filter_pattern = event["FilterPattern"] + metric_name = event["MetricName"] + metric_namespace = event["MetricNamespace"] + metric_value = event["MetricValue"] + alarm_name = event["AlarmName"] + alarm_desc = event["AlarmDesc"] + alarm_threshold = event["AlarmThreshold"] + cw_log_group = event["LogGroupName"] + topic_arn = event["TopicArn"] + + put_metric_filter( + cw_log_group, + filter_name, + filter_pattern, + metric_name, + metric_namespace, + metric_value, + ) + put_metric_alarm( + alarm_name, + alarm_desc, + alarm_threshold, + metric_name, + metric_namespace, + topic_arn, + ) return { "response": { "message": f'Created filter {event["FilterName"]} for metric {event["MetricName"]}, and alarm {event["AlarmName"]}', - "status": "Success" + "status": "Success", } }", }, @@ -2162,7 +2363,7 @@ Confirms the public accessibility setting is disabled on the cluster. * SubnetARN: (Required) The ARN of the Amazon EC2 Subnet. ## Security Standards / Controls -* AFSBP v1.0.0: EC2.15 +* AWS FSBP v1.0.0: EC2.15 ", "mainSteps": [ { @@ -2177,59 +2378,56 @@ Confirms the public accessibility setting is disabled on the cluster. 
# SPDX-License-Identifier: Apache-2.0 import boto3 from botocore.config import Config -from botocore.exceptions import ClientError - -boto_config = Config( - retries = { - 'mode': 'standard', - 'max_attempts': 10 - } - ) - - - + +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + + def connect_to_ec2(): - return boto3.client('ec2', config=boto_config) - + return boto3.client("ec2", config=boto_config) + + def lambda_handler(event, _): - """ - Disable public IP auto assignment on a subnet. - - \`event\` should have the following keys and values: - \`subnet_arn\`: the ARN of the subnet that has public IP auto assignment enabled. - - \`context\` is ignored - """ - - subnet_arn = event['subnet_arn'] - - subnet_id = subnet_arn.split('/')[1] - - disable_publicip_auto_assign(subnet_id) - - subnet_attributes = describe_subnet(subnet_id) - - public_ip_on_launch = subnet_attributes['Subnets'][0]['MapPublicIpOnLaunch'] - - if public_ip_on_launch == False: - return { - "MapPublicIpOnLaunch": public_ip_on_launch - } + """ + Disable public IP auto assignment on a subnet. + + \`event\` should have the following keys and values: + \`subnet_arn\`: the ARN of the subnet that has public IP auto assignment enabled. + + \`context\` is ignored + """ + + subnet_arn = event["subnet_arn"] + + subnet_id = subnet_arn.split("/")[1] + + disable_publicip_auto_assign(subnet_id) + + subnet_attributes = describe_subnet(subnet_id) + + public_ip_on_launch = subnet_attributes["Subnets"][0]["MapPublicIpOnLaunch"] + + if public_ip_on_launch is False: + return {"MapPublicIpOnLaunch": public_ip_on_launch} + + raise RuntimeError( + f"ASR Remediation failed - {subnet_id} did not have public IP auto assignment turned off." 
+ ) + - raise RuntimeError(f'ASR Remediation failed - {subnet_id} did not have public IP auto assignment turned off.') - - def disable_publicip_auto_assign(subnet_id): - """ - Disables public IP Auto Assign on the subnet \`subnet_id\` - """ - ec2 = connect_to_ec2() - try: - ec2.modify_subnet_attribute(MapPublicIpOnLaunch={'Value':False},SubnetId=subnet_id) - - except Exception as e: - exit(f'There was an error turning off public IP auto assignment: '+str(e)) - + """ + Disables public IP Auto Assign on the subnet \`subnet_id\` + """ + ec2 = connect_to_ec2() + try: + ec2.modify_subnet_attribute( + MapPublicIpOnLaunch={"Value": False}, SubnetId=subnet_id + ) + + except Exception as e: + exit("There was an error turning off public IP auto assignment: " + str(e)) + + def describe_subnet(subnet_id): """ Grabs Subnet Attributes to verify subnet values were set as expected. @@ -2238,9 +2436,9 @@ def describe_subnet(subnet_id): try: subnet_attributes = ec2.describe_subnets(SubnetIds=[subnet_id]) return subnet_attributes - + except Exception as e: - exit(f'Failed to get attributes of subnet: '+str(e))", + exit("Failed to get attributes of subnet: " + str(e))", }, "name": "DisablePublicIPAutoAssign", "outputs": [ @@ -2277,37 +2475,322 @@ def describe_subnet(subnet_id): }, "Type": "AWS::SSM::Document", }, - "ASREnableAWSConfig": { + "ASRDisableTGWAutoAcceptSharedAttachments": { "DependsOn": [ - "CreateWait0", + "CreateWait10", ], "Properties": { "Content": { "assumeRole": "{{ AutomationAssumeRole }}", - "description": "### Document name - ASR-EnableAWSConfig + "description": "### Document name - ASR-DisableTGWAutoAcceptSharedAttachments ## What does this document do? -Enables AWS Config: -* Turns on recording for all resources. -* Creates an encrypted bucket for Config logging. 
-* Creates a logging bucket for access logs for the config bucket -* Creates an SNS topic for Config notifications -* Creates a service-linked role + This document turns off AutoAcceptSharedAttachments on a transit gateway to ensure that only authorized VPC attachment requests are accepted. + [ModifyTransitGateway](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_ModifyTransitGateway.html) API. + ## Input Parameters -* AutomationAssumeRole: (Required) The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that allows Systems Manager Automation to perform the actions on your behalf. -* KMSKeyArn: KMS Customer-managed key to use for encryption of Config log data and SNS Topic -* AWSServiceRoleForConfig: (Optional) The name of the exiting IAM role to use for the Config service. Default: aws-service-role/config.amazonaws.com/AWSServiceRoleForConfig -* SNSTopicName: (Required) Name of the SNS Topic to use to post AWS Config messages. +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +* TransitGatewayId: (Required) The Id of the transit gateway. -## Output Parameters -* Remediation.Output: STDOUT and messages from the remediation steps. +## Security Standards / Controls +* AFSBP v1.0.0: EC2.23 ", "mainSteps": [ { "action": "aws:executeScript", "inputs": { - "Handler": "create_encrypted_topic", + "Handler": "lambda_handler", + "InputPayload": { + "TransitGatewayId": "{{ TransitGatewayId }}", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import boto3 +from botocore.config import Config + +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + + +def connect_to_ec2(): + return boto3.client("ec2", config=boto_config) + + +def lambda_handler(event, _): + tgw_id = event["TransitGatewayId"] + + ec2 = connect_to_ec2() + + try: + ec2.modify_transit_gateway( + TransitGatewayId=tgw_id, Options={"AutoAcceptSharedAttachments": "disable"} + ) + + tgw_updated = ec2.describe_transit_gateways(TransitGatewayIds=[tgw_id]) + if ( + tgw_updated["TransitGateways"][0]["Options"]["AutoAcceptSharedAttachments"] + == "disable" + ): + return { + "response": { + "message": "Transit Gateway AutoAcceptSharedAttachments option disabled.", + "status": "Success", + } + } + else: + return { + "response": { + "message": "Failed to disable AutoAcceptSharedAttachments on Transit Gateway.", + "status": "Failed", + } + } + + except Exception as e: + exit("Failed to disable AutoAcceptSharedAttachments: " + str(e))", + }, + "maxAttempts": 3, + "name": "DisableTGWAutoAcceptSharedAttachments", + "outputs": [ + { + "Name": "Output", + "Selector": "$.Payload", + "Type": "StringMap", + }, + ], + "timeoutSeconds": 600, + }, + ], + "outputs": [ + "DisableTGWAutoAcceptSharedAttachments.Output", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "TransitGatewayId": { + "allowedPattern": "^tgw-[a-z0-9\\-]+$", + "description": "(Required) The Id of the Transit Gateway.", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-DisableTGWAutoAcceptSharedAttachments", + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ASRDisableUnrestrictedAccessToHighRiskPorts": { + 
"DependsOn": [ + "CreateWait9", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document name - ASR-DisableUnrestrictedAccessToHighRiskPorts + +## What does this document do? + This document disables unrestricted access to high risk ports using + [DescribeSecurityGroupRules](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSecurityGroupRules.html) API, + [ModifySecurityGroupRules](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_ModifySecurityGroupRules.html) API. + + +## Input Parameters +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +* SecurityGroupId: (Required) The Id of the security group. + +## Security Standards / Controls +* AFSBP v1.0.0: EC2.19 +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "lambda_handler", + "InputPayload": { + "SecurityGroupId": "{{ SecurityGroupId }}", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import boto3 +from botocore.config import Config + +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + +# List of high risk ports to check for unrestricted access +PORTS_TO_CHECK = { + 20, + 21, + 22, + 23, + 25, + 110, + 135, + 143, + 445, + 1433, + 1434, + 3000, + 3306, + 3389, + 4333, + 5000, + 5432, + 5500, + 5601, + 8080, + 8088, + 8888, + 9200, + 9300, +} +# IPV4 and IPV6 open access +OPENIPV4 = "0.0.0.0/0" +OPENIPV6 = "::/0" +PROTOCOLS = {"tcp", "udp"} + + +def connect_to_ec2(): + return boto3.client("ec2", config=boto_config) + + +def lambda_handler(event, _): + security_group_id = event["SecurityGroupId"] + + ec2 = connect_to_ec2() + + try: + # Get the security group rules + security_group_rules = ec2.describe_security_group_rules( + Filters=[ + { + "Name": "group-id", + "Values": [ + security_group_id, + ], + }, + ], + ) + + # List to return rules that are deleted + rules_deleted = [] + + for rule in security_group_rules["SecurityGroupRules"]: + # Look for TCP or UDP ingress rules + if rule["IpProtocol"] in PROTOCOLS and not rule["IsEgress"]: + # Check for high risk ports + if any( + port in range(rule["FromPort"], rule["ToPort"] + 1) + for port in PORTS_TO_CHECK + ): + # Check for IPV4 open access + if "CidrIpv4" in rule and rule["CidrIpv4"] == OPENIPV4: + # Add rule to list + rules_deleted.append(rule["SecurityGroupRuleId"]) + # Delete the rule + ec2.revoke_security_group_ingress( + GroupId=security_group_id, + SecurityGroupRuleIds=[ + rule["SecurityGroupRuleId"], + ], + ) + + # Check for IPV6 open access + elif "CidrIpv6" in rule and rule["CidrIpv6"] == OPENIPV6: + # Add rule to list + rules_deleted.append(rule["SecurityGroupRuleId"]) + + # Delete the rule + ec2.revoke_security_group_ingress( + GroupId=security_group_id, + SecurityGroupRuleIds=[ + rule["SecurityGroupRuleId"], + ], + ) + + return { + "message": "Successfully removed security group rules on " + + security_group_id, + "status": 
"Success", + "rules_deleted": rules_deleted, + } + + except Exception as e: + exit("Failed to remove security group rules: " + str(e))", + }, + "maxAttempts": 3, + "name": "DisableUnrestrictedAccessToHighRiskPorts", + "outputs": [ + { + "Name": "Output", + "Selector": "$.Payload", + "Type": "StringMap", + }, + ], + "timeoutSeconds": 600, + }, + ], + "outputs": [ + "DisableUnrestrictedAccessToHighRiskPorts.Output", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "SecurityGroupId": { + "allowedPattern": "^sg-[a-z0-9\\-]+$", + "description": "(Required) The Id of the Seurity Group.", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-DisableUnrestrictedAccessToHighRiskPorts", + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ASREnableAWSConfig": { + "DependsOn": [ + "CreateWait0", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document name - ASR-EnableAWSConfig + +## What does this document do? +Enables AWS Config: +* Turns on recording for all resources. +* Creates an encrypted bucket for Config logging. +* Creates a logging bucket for access logs for the config bucket +* Creates an SNS topic for Config notifications +* Creates a service-linked role + +## Input Parameters +* AutomationAssumeRole: (Required) The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that allows Systems Manager Automation to perform the actions on your behalf. +* KMSKeyArn: KMS Customer-managed key to use for encryption of Config log data and SNS Topic +* AWSServiceRoleForConfig: (Optional) The name of the exiting IAM role to use for the Config service. 
Default: aws-service-role/config.amazonaws.com/AWSServiceRoleForConfig +* SNSTopicName: (Required) Name of the SNS Topic to use to post AWS Config messages. + +## Output Parameters +* Remediation.Output: STDOUT and messages from the remediation steps. +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "create_encrypted_topic", "InputPayload": { "kms_key_arn": "{{KMSKeyArn}}", "topic_name": "{{SNSTopicName}}", @@ -2316,94 +2799,90 @@ Enables AWS Config: "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 import json + import boto3 from botocore.config import Config from botocore.exceptions import ClientError -boto_config = Config( - retries ={ - 'mode': 'standard' - } -) +boto_config = Config(retries={"mode": "standard"}) + def connect_to_sns(): - return boto3.client('sns', config=boto_config) + return boto3.client("sns", config=boto_config) + def connect_to_ssm(): - return boto3.client('ssm', config=boto_config) + return boto3.client("ssm", config=boto_config) -def create_encrypted_topic(event, _): - kms_key_arn = event['kms_key_arn'] +def create_encrypted_topic(event, _): + kms_key_arn = event["kms_key_arn"] new_topic = False - topic_arn = '' - topic_name = event['topic_name'] + topic_arn = "" + topic_name = event["topic_name"] try: sns = connect_to_sns() topic_arn = sns.create_topic( - Name=topic_name, - Attributes={ - 'KmsMasterKeyId': kms_key_arn.split('key/')[1] - } - )['TopicArn'] + Name=topic_name, Attributes={"KmsMasterKeyId": kms_key_arn.split("key/")[1]} + )["TopicArn"] new_topic = True except ClientError as client_exception: - exception_type = client_exception.response['Error']['Code'] - if exception_type == 'InvalidParameter': - print(f'Topic {topic_name} already exists. 
This remediation may have been run before.') - print('Ignoring exception - remediation continues.') - topic_arn = sns.create_topic( - Name=topic_name - )['TopicArn'] + exception_type = client_exception.response["Error"]["Code"] + if exception_type == "InvalidParameter": + print( + f"Topic {topic_name} already exists. This remediation may have been run before." + ) + print("Ignoring exception - remediation continues.") + topic_arn = sns.create_topic(Name=topic_name)["TopicArn"] else: - exit(f'ERROR: Unhandled client exception: {client_exception}') + exit(f"ERROR: Unhandled client exception: {client_exception}") except Exception as e: - exit(f'ERROR: could not create SNS Topic {topic_name}: {str(e)}') + exit(f"ERROR: could not create SNS Topic {topic_name}: {str(e)}") if new_topic: try: ssm = connect_to_ssm() ssm.put_parameter( - Name='/Solutions/SO0111/SNS_Topic_Config.1', - Description='SNS Topic for AWS Config updates', - Type='String', + Name="/Solutions/SO0111/SNS_Topic_Config.1", + Description="SNS Topic for AWS Config updates", + Type="String", Overwrite=True, - Value=topic_arn + Value=topic_arn, ) except Exception as e: - exit(f'ERROR: could not create SNS Topic {topic_name}: {str(e)}') + exit(f"ERROR: could not create SNS Topic {topic_name}: {str(e)}") create_topic_policy(topic_arn) return {"topic_arn": topic_arn} + def create_topic_policy(topic_arn): sns = connect_to_sns() try: topic_policy = { "Id": "Policy_ID", "Statement": [ - { - "Sid": "AWSConfigSNSPolicy", - "Effect": "Allow", - "Principal": { - "Service": "config.amazonaws.com" - }, - "Action": "SNS:Publish", - "Resource": topic_arn, - }] + { + "Sid": "AWSConfigSNSPolicy", + "Effect": "Allow", + "Principal": {"Service": "config.amazonaws.com"}, + "Action": "SNS:Publish", + "Resource": topic_arn, + } + ], } sns.set_topic_attributes( TopicArn=topic_arn, - AttributeName='Policy', - AttributeValue=json.dumps(topic_policy) + AttributeName="Policy", + AttributeValue=json.dumps(topic_policy), ) except 
Exception as e: - exit(f'ERROR: Failed to SetTopicAttributes for {topic_arn}: {str(e)}')", + exit(f"ERROR: Failed to SetTopicAttributes for {topic_arn}: {str(e)}")", }, "isEnd": false, "name": "CreateTopic", @@ -2442,67 +2921,62 @@ def create_topic_policy(topic_arn): "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 import json + import boto3 from botocore.config import Config from botocore.exceptions import ClientError -from botocore.retries import bucket -boto_config = Config( - retries ={ - 'mode': 'standard' - } -) +boto_config = Config(retries={"mode": "standard"}) + def connect_to_s3(boto_config): - return boto3.client('s3', config=boto_config) + return boto3.client("s3", config=boto_config) + def create_bucket(bucket_name, aws_region): s3 = connect_to_s3(boto_config) try: - if aws_region == 'us-east-1': - s3.create_bucket( - ACL='private', - Bucket=bucket_name - ) + if aws_region == "us-east-1": + s3.create_bucket(ACL="private", Bucket=bucket_name) else: s3.create_bucket( - ACL='private', + ACL="private", Bucket=bucket_name, - CreateBucketConfiguration={ - 'LocationConstraint': aws_region - } + CreateBucketConfiguration={"LocationConstraint": aws_region}, ) return "created" except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] # bucket already exists - return if exception_type in ["BucketAlreadyExists", "BucketAlreadyOwnedByYou"]: - print('Bucket ' + bucket_name + ' already exists') + print("Bucket " + bucket_name + " already exists") return "already exists" else: - exit(f'ERROR creating bucket {bucket_name}: {str(ex)}') + exit(f"ERROR creating bucket {bucket_name}: {str(ex)}") except Exception as e: - exit(f'ERROR creating bucket {bucket_name}: {str(e)}') + exit(f"ERROR creating bucket {bucket_name}: {str(e)}") + def encrypt_bucket(bucket_name, kms_key): s3 = connect_to_s3(boto_config) try: s3.put_bucket_encryption( - 
Bucket=bucket_name, - ServerSideEncryptionConfiguration={ - 'Rules': [ - { - 'ApplyServerSideEncryptionByDefault': { - 'SSEAlgorithm': 'aws:kms', - 'KMSMasterKeyID': kms_key - } - } - ] - } - ) + Bucket=bucket_name, + ServerSideEncryptionConfiguration={ + "Rules": [ + { + "ApplyServerSideEncryptionByDefault": { + "SSEAlgorithm": "aws:kms", + "KMSMasterKeyID": kms_key, + } + } + ] + }, + ) except Exception as e: - exit(f'ERROR putting bucket encryption for {bucket_name}: {str(e)}') + exit(f"ERROR putting bucket encryption for {bucket_name}: {str(e)}") + def block_public_access(bucket_name): s3 = connect_to_s3(boto_config) @@ -2510,14 +2984,15 @@ def block_public_access(bucket_name): s3.put_public_access_block( Bucket=bucket_name, PublicAccessBlockConfiguration={ - 'BlockPublicAcls': True, - 'IgnorePublicAcls': True, - 'BlockPublicPolicy': True, - 'RestrictPublicBuckets': True - } + "BlockPublicAcls": True, + "IgnorePublicAcls": True, + "BlockPublicPolicy": True, + "RestrictPublicBuckets": True, + }, ) except Exception as e: - exit(f'ERROR setting public access block for bucket {bucket_name}: {str(e)}') + exit(f"ERROR setting public access block for bucket {bucket_name}: {str(e)}") + def enable_access_logging(bucket_name, logging_bucket): s3 = connect_to_s3(boto_config) @@ -2525,14 +3000,15 @@ def enable_access_logging(bucket_name, logging_bucket): s3.put_bucket_logging( Bucket=bucket_name, BucketLoggingStatus={ - 'LoggingEnabled': { - 'TargetBucket': logging_bucket, - 'TargetPrefix': f'access-logs/{bucket_name}' - } - } + "LoggingEnabled": { + "TargetBucket": logging_bucket, + "TargetPrefix": f"access-logs/{bucket_name}", + } + }, ) except Exception as e: - exit(f'Error setting access logging for bucket {bucket_name}: {str(e)}') + exit(f"Error setting access logging for bucket {bucket_name}: {str(e)}") + def create_bucket_policy(config_bucket, aws_partition): s3 = connect_to_s3(boto_config) @@ -2540,66 +3016,53 @@ def create_bucket_policy(config_bucket, 
aws_partition): bucket_policy = { "Version": "2012-10-17", "Statement": [ - { - "Sid": "AWSConfigBucketPermissionsCheck", - "Effect": "Allow", - "Principal": { - "Service": [ - "config.amazonaws.com" #NOSONAR - ] + { + "Sid": "AWSConfigBucketPermissionsCheck", + "Effect": "Allow", + "Principal": {"Service": ["config.amazonaws.com"]}, # NOSONAR + "Action": "s3:GetBucketAcl", + "Resource": "arn:" + aws_partition + ":s3:::" + config_bucket, }, - "Action": "s3:GetBucketAcl", - "Resource": "arn:" + aws_partition + ":s3:::" + config_bucket - }, - { - "Sid": "AWSConfigBucketExistenceCheck", - "Effect": "Allow", - "Principal": { - "Service": [ - "config.amazonaws.com" - ] + { + "Sid": "AWSConfigBucketExistenceCheck", + "Effect": "Allow", + "Principal": {"Service": ["config.amazonaws.com"]}, + "Action": "s3:ListBucket", + "Resource": "arn:" + aws_partition + ":s3:::" + config_bucket, }, - "Action": "s3:ListBucket", - "Resource": "arn:" + aws_partition + ":s3:::" + config_bucket - }, - { - "Sid": "AWSConfigBucketDelivery", - "Effect": "Allow", - "Principal": { - "Service": [ - "config.amazonaws.com" - ] + { + "Sid": "AWSConfigBucketDelivery", + "Effect": "Allow", + "Principal": {"Service": ["config.amazonaws.com"]}, + "Action": "s3:PutObject", + "Resource": "arn:" + + aws_partition + + ":s3:::" + + config_bucket + + "/*", + "Condition": { + "StringEquals": {"s3:x-amz-acl": "bucket-owner-full-control"} + }, }, - "Action": "s3:PutObject", - "Resource": "arn:" + aws_partition + ":s3:::" + config_bucket + "/*", - "Condition": { - "StringEquals": { - "s3:x-amz-acl": "bucket-owner-full-control" - } - } - } - ] + ], } - s3.put_bucket_policy( - Bucket=config_bucket, - Policy=json.dumps(bucket_policy) - ) + s3.put_bucket_policy(Bucket=config_bucket, Policy=json.dumps(bucket_policy)) except Exception as e: - exit(f'ERROR: PutBucketPolicy failed for {config_bucket}: {str(e)}') + exit(f"ERROR: PutBucketPolicy failed for {config_bucket}: {str(e)}") -def create_encrypted_bucket(event, _): 
- kms_key_arn = event['kms_key_arn'] - aws_partition = event['partition'] - aws_account = event['account'] - aws_region = event['region'] - logging_bucket = event['logging_bucket'] - bucket_name = 'so0111-aws-config-' + aws_region + '-' + aws_account +def create_encrypted_bucket(event, _): + kms_key_arn = event["kms_key_arn"] + aws_partition = event["partition"] + aws_account = event["account"] + aws_region = event["region"] + logging_bucket = event["logging_bucket"] + bucket_name = "so0111-aws-config-" + aws_region + "-" + aws_account - if create_bucket(bucket_name, aws_region) == 'already exists': + if create_bucket(bucket_name, aws_region) == "already exists": return {"config_bucket": bucket_name} - encrypt_bucket(bucket_name, kms_key_arn.split('key/')[1]) + encrypt_bucket(bucket_name, kms_key_arn.split("key/")[1]) block_public_access(bucket_name) enable_access_logging(bucket_name, logging_bucket) create_bucket_policy(bucket_name, aws_partition) @@ -2635,78 +3098,88 @@ import boto3 from botocore.config import Config from botocore.exceptions import ClientError -boto_config = Config( - retries ={ - 'mode': 'standard' - } -) +boto_config = Config(retries={"mode": "standard"}) + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def create_config_recorder(aws_partition, aws_account, aws_service_role): cfgsvc = connect_to_config(boto_config) try: - config_service_role_arn = 'arn:' + aws_partition + ':iam::' + aws_account + ':role/' + aws_service_role + config_service_role_arn = ( + "arn:" + + aws_partition + + ":iam::" + + aws_account + + ":role/" + + aws_service_role + ) cfgsvc.put_configuration_recorder( ConfigurationRecorder={ - 'name': 'default', - 'roleARN': config_service_role_arn, - 'recordingGroup': { - 'allSupported': True, - 'includeGlobalResourceTypes': True - } + "name": "default", + "roleARN": config_service_role_arn, + "recordingGroup": { + "allSupported": True, + 
"includeGlobalResourceTypes": True, + }, } ) except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] # recorder already exists - continue if exception_type in ["MaxNumberOfConfigurationRecordersExceededException"]: - print('Config Recorder already exists. Continuing.') + print("Config Recorder already exists. Continuing.") else: - exit(f'ERROR: Boto3 ClientError enabling Config: {exception_type} - {str(ex)}') + exit( + f"ERROR: Boto3 ClientError enabling Config: {exception_type} - {str(ex)}" + ) except Exception as e: - exit(f'ERROR enabling AWS Config - create_config_recorder: {str(e)}') + exit(f"ERROR enabling AWS Config - create_config_recorder: {str(e)}") + def create_delivery_channel(config_bucket, aws_account, topic_arn): cfgsvc = connect_to_config(boto_config) try: cfgsvc.put_delivery_channel( DeliveryChannel={ - 'name': 'default', - 's3BucketName': config_bucket, - 's3KeyPrefix': aws_account, - 'snsTopicARN': topic_arn, - 'configSnapshotDeliveryProperties': { - 'deliveryFrequency': 'Twelve_Hours' - } + "name": "default", + "s3BucketName": config_bucket, + "s3KeyPrefix": aws_account, + "snsTopicARN": topic_arn, + "configSnapshotDeliveryProperties": { + "deliveryFrequency": "Twelve_Hours" + }, } ) except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] # delivery channel already exists - return if exception_type in ["MaxNumberOfDeliveryChannelsExceededException"]: - print('DeliveryChannel already exists') + print("DeliveryChannel already exists") else: - exit(f'ERROR: Boto3 ClientError enabling Config: {exception_type} - {str(ex)}') + exit( + f"ERROR: Boto3 ClientError enabling Config: {exception_type} - {str(ex)}" + ) except Exception as e: - exit(f'ERROR enabling AWS Config - create_delivery_channel: {str(e)}') + exit(f"ERROR enabling AWS Config - create_delivery_channel: {str(e)}") + def start_recorder(): cfgsvc = 
connect_to_config(boto_config) try: - cfgsvc.start_configuration_recorder( - ConfigurationRecorderName='default' - ) + cfgsvc.start_configuration_recorder(ConfigurationRecorderName="default") except Exception as e: - exit(f'ERROR enabling AWS Config: {str(e)}') + exit(f"ERROR enabling AWS Config: {str(e)}") + def enable_config(event, _): - aws_account = event['account'] - aws_partition = event['partition'] - aws_service_role = event['aws_service_role'] - config_bucket = event['config_bucket'] - topic_arn = event['topic_arn'] + aws_account = event["account"] + aws_partition = event["partition"] + aws_service_role = event["aws_service_role"] + config_bucket = event["config_bucket"] + topic_arn = event["topic_arn"] create_config_recorder(aws_partition, aws_account, aws_service_role) create_delivery_channel(config_bucket, aws_account, topic_arn) @@ -2737,13 +3210,12 @@ def enable_config(event, _): def process_results(event, _): print(f'Created encrypted SNS topic {event["sns_topic_arn"]}') print(f'Created encrypted Config bucket {event["config_bucket"]}') - print(f'Created access logging for Config bucket in bucket {event["logging_bucket"]}') - print('Enabled AWS Config by creating a default recorder') + print( + f'Created access logging for Config bucket in bucket {event["logging_bucket"]}' + ) + print("Enabled AWS Config by creating a default recorder") return { - "response": { - "message": "AWS Config successfully enabled", - "status": "Success" - } + "response": {"message": "AWS Config successfully enabled", "status": "Success"} }", }, "isEnd": true, @@ -2812,7 +3284,7 @@ This runbook enables health checks for the Amazon EC2 Auto Scaling (Auto Scaling * Remediation.Output - stdout messages from the remediation ## Security Standards / Controls -* AFSBP v1.0.0: Autoscaling.1 +* AWS FSBP v1.0.0: Autoscaling.1 * CIS v1.2.0: 2.1 * PCI: Autoscaling.1 ", @@ -2847,84 +3319,215 @@ This runbook enables health checks for the Amazon EC2 Auto Scaling (Auto Scaling "Script": "# 
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 import json + import boto3 from botocore.config import Config -from botocore.exceptions import ClientError -def connect_to_autoscaling(boto_config): - return boto3.client('autoscaling', config=boto_config) -def verify(event, _): +def connect_to_autoscaling(boto_config): + return boto3.client("autoscaling", config=boto_config) + + +def verify(event, _): + boto_config = Config(retries={"mode": "standard"}) + asg_client = connect_to_autoscaling(boto_config) + asg_name = event["AsgName"] + try: + desc_asg = asg_client.describe_auto_scaling_groups( + AutoScalingGroupNames=[asg_name] + ) + if len(desc_asg["AutoScalingGroups"]) < 1: + exit(f"No AutoScaling Group found matching {asg_name}") + + health_check = desc_asg["AutoScalingGroups"][0]["HealthCheckType"] + print(json.dumps(desc_asg["AutoScalingGroups"][0], default=str)) + if health_check == "ELB": + return { + "response": { + "message": "Autoscaling Group health check type updated to ELB", + "status": "Success", + } + } + else: + return { + "response": { + "message": "Autoscaling Group health check type is not ELB", + "status": "Failed", + } + } + except Exception as e: + exit("Exception while executing remediation: " + str(e))", + }, + "name": "Remediation", + "outputs": [ + { + "Name": "Output", + "Selector": "$.Payload.response", + "Type": "StringMap", + }, + ], + }, + ], + "outputs": [ + "Remediation.Output", + ], + "parameters": { + "AutoScalingGroupName": { + "allowedPattern": "^.{1,255}$", + "description": "(Required) The Amazon Resource Name (ARN) of the auto scaling group that you want to enable health checks on.", + "type": "String", + }, + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + 
"HealthCheckGracePeriod": { + "allowedPattern": "^[0-9]\\d*$", + "default": 300, + "description": "(Optional) The amount of time, in seconds, that Auto Scaling waits before checking the health status of an Amazon Elastic Compute Cloud (Amazon EC2) instance that has come into service.", + "type": "Integer", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-EnableAutoScalingGroupELBHealthCheck", + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ASREnableAutoSecretRotation": { + "DependsOn": [ + "CreateWait10", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document name - ASR-EnableAutoSecretRotation + +## What does this document do? + This document enables automatic rotation on a Secrets Manager secret if a Lambda function is already associated with it. + [RotateSecret](https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_RotateSecret.html) API. + + +## Input Parameters +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +* SecretARN: (Required) The ARN of the Secrets Manager secret. +* MaximumAllowedRotationFrequency: (Optional) The number of days that a secret must be automatically rotated within. + +## Security Standards / Controls +* AFSBP v1.0.0: SecretsManager.1 +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "lambda_handler", + "InputPayload": { + "MaximumAllowedRotationFrequency": "{{ MaximumAllowedRotationFrequency }}", + "SecretARN": "{{ SecretARN }}", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 + +import boto3 +from botocore.config import Config + +BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + + +def connect_to_secretsmanager(): + return boto3.client("secretsmanager", config=BOTO_CONFIG) + + +# Check if secret rotation is enabled on the secet. +def check_secret_rotation(secret_arn, secretsmanager_client): + response = secretsmanager_client.describe_secret(SecretId=secret_arn) + if "RotationEnabled" in response: + if response["RotationEnabled"]: + return True + else: + return False + + +def lambda_handler(event, _): + secret_arn = event["SecretARN"] + number_of_days = event["MaximumAllowedRotationFrequency"] + + secretsmanager = connect_to_secretsmanager() - boto_config = Config( - retries ={ - 'mode': 'standard' - } - ) - asg_client = connect_to_autoscaling(boto_config) - asg_name = event['AsgName'] try: - desc_asg = asg_client.describe_auto_scaling_groups( - AutoScalingGroupNames=[asg_name] + # Set rotation schedule following best practices + secretsmanager.rotate_secret( + SecretId=secret_arn, + RotationRules={ + "AutomaticallyAfterDays": int(number_of_days), + }, + RotateImmediately=False, ) - if len(desc_asg['AutoScalingGroups']) < 1: - exit(f'No AutoScaling Group found matching {asg_name}') - health_check = desc_asg['AutoScalingGroups'][0]['HealthCheckType'] - print(json.dumps(desc_asg['AutoScalingGroups'][0], default=str)) - if (health_check == 'ELB'): + # Verify secret rotation is enabled. + if check_secret_rotation(secret_arn, secretsmanager): return { - "response": { - "message": "Autoscaling Group health check type updated to ELB", - "status": "Success" - } + "message": f"Enabled automatic secret rotation every {number_of_days} days with previously set rotation function.", + "status": "Success", } else: + raise RuntimeError( + "Failed to set automatic rotation schedule. Please manually set rotation on the secret." 
+ ) + + # If a Lambda function ARN is not associated, an exception will be thrown. + except Exception as e: + # Verify secret rotation is enabled. + if check_secret_rotation(secret_arn, secretsmanager): return { - "response": { - "message": "Autoscaling Group health check type is not ELB", - "status": "Failed" - } + "message": f"Enabled automatic secret rotation every {number_of_days} days with previously set function.", + "status": "Success", } - except Exception as e: - exit("Exception while executing remediation: " + str(e))", + else: + exit(f"Error when setting automatic rotation schedule: {str(e)}")", }, - "name": "Remediation", + "maxAttempts": 3, + "name": "EnableAutoSecretRotation", "outputs": [ { "Name": "Output", - "Selector": "$.Payload.response", + "Selector": "$.Payload", "Type": "StringMap", }, ], + "timeoutSeconds": 600, }, ], "outputs": [ - "Remediation.Output", + "EnableAutoSecretRotation.Output", ], "parameters": { - "AutoScalingGroupName": { - "allowedPattern": "^.{1,255}$", - "description": "(Required) The Amazon Resource Name (ARN) of the auto scaling group that you want to enable health checks on.", - "type": "String", - }, "AutomationAssumeRole": { "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role/[\\w+=,.@-]+$", "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", "type": "String", }, - "HealthCheckGracePeriod": { - "allowedPattern": "^[0-9]\\d*$", - "default": 300, - "description": "(Optional) The amount of time, in seconds, that Auto Scaling waits before checking the health status of an Amazon Elastic Compute Cloud (Amazon EC2) instance that has come into service.", + "MaximumAllowedRotationFrequency": { + "default": 90, + "description": "(Optional) The number of days that a secret must be automatically rotated within.", "type": "Integer", }, + "SecretARN": { + "allowedPattern": 
"^arn:(?:aws|aws-cn|aws-us-gov):secretsmanager:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:secret:([A-Za-z0-9\\/_+=.@-]+)$", + "description": "(Required) The ARN of the Secrets Manager secret.", + "type": "String", + }, }, "schemaVersion": "0.3", }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-EnableAutoScalingGroupELBHealthCheck", + "Name": "ASR-EnableAutoSecretRotation", "UpdateMethod": "NewVersion", }, "Type": "AWS::SSM::Document", @@ -3177,47 +3780,411 @@ The runbook enables automatic version upgrade on a Redshift cluster. return str(event['AllowVersionUpgrade']) ", }, - "name": "CastAllowVersionUpgradeToString", + "name": "CastAllowVersionUpgradeToString", + "outputs": [ + { + "Name": "AllowVersionUpgradeString", + "Selector": "$.Payload", + "Type": "String", + }, + ], + }, + { + "action": "aws:assertAwsResourceProperty", + "inputs": { + "Api": "DescribeClusters", + "ClusterIdentifier": "{{ClusterIdentifier}}", + "DesiredValues": [ + "{{CastAllowVersionUpgradeToString.AllowVersionUpgradeString}}", + ], + "PropertySelector": "$.Clusters[0].AllowVersionUpgrade", + "Service": "redshift", + }, + "isEnd": true, + "name": "VerifyAutomaticVersionUpgrade", + }, + ], + "outputs": [ + "EnableAutomaticVersionUpgrade.Response", + ], + "parameters": { + "AllowVersionUpgrade": { + "default": true, + "description": "(Optional) Whether to allow version upgrade on the cluster.", + "type": "Boolean", + }, + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "ClusterIdentifier": { + "allowedPattern": "^(?!.*--)[a-z][a-z0-9-]{0,62}(? 
str: + """Get the SNS topic arn that will be used to configure notifications, creating it if it does not already exist""" + sns: SNSClient = boto3.client("sns", config=boto_config) + # get partition and region to buildArn here, replace sourceArn under condition + session = boto3.session.Session() + region = session.region_name + partition = partition_from_region(session) + expected_topic_arn = f"arn:{partition}:sns:{region}:{account_id}:{topic_name}" + policy = { + "Version": "2012-10-17", + "Id": "ASR Notification Policy", + "Statement": [ + { + "Sid": bucket_name + " ASR Notification Policy", + "Effect": "Allow", + "Principal": {"Service": "s3.amazonaws.com"}, + "Action": ["SNS:Publish"], + "Resource": expected_topic_arn, + "Condition": { + "ArnLike": { + "aws:SourceArn": [f"arn:{partition}:s3:::" + bucket_name] + }, + "StringEquals": {"aws:SourceAccount": [account_id]}, + }, + } + ], + } + + try: + topic_attributes = sns.get_topic_attributes(TopicArn=expected_topic_arn) + topic_attributes_policy = topic_attributes["Attributes"]["Policy"] # str + topic_attributes_policy_dict = json.loads(topic_attributes_policy) # dict + for statement in topic_attributes_policy_dict["Statement"]: + if statement["Sid"] == bucket_name + " ASR Notification Policy": + return expected_topic_arn + topic_attributes_policy_dict["Statement"].append(policy["Statement"][0]) + new_topic_attributes_policy = json.dumps(topic_attributes_policy_dict) + response = sns.set_topic_attributes( + TopicArn=expected_topic_arn, + AttributeName="Policy", + AttributeValue=new_topic_attributes_policy, + ) + return expected_topic_arn + except Exception: + string_policy = json.dumps(policy) + response = sns.create_topic( + Name=topic_name, + Attributes={"Policy": string_policy}, + ) + return response["TopicArn"] + + +def configure_notifications( + bucket_name: str, topic_arn: str, event_types: List[str] +) -> None: + """Configure the bucket \`bucket_name\` to notify the sns topic with ARN \`topic_arn\`""" + 
s3 = boto3.client("s3", config=boto_config) + s3.put_bucket_notification_configuration( + Bucket=bucket_name, + NotificationConfiguration={ + "TopicConfigurations": [ + { + "Id": "ASR Bucket Notification Topic Config", + "Events": event_types, + "TopicArn": topic_arn, + } + ] + }, + ) + + +def assert_bucket_notifcations_configured(bucket_name, account_id): + """ + Verify that the bucket \`bucket_name\` is configured to update the SNS topic + with ARN \`topic_arn\` + """ + s3 = boto3.client("s3", config=boto_config) + notification_configuration = s3.get_bucket_notification_configuration( + Bucket=bucket_name, ExpectedBucketOwner=account_id + ) + try: + return { + "NotificationARNs": notification_configuration["TopicConfigurations"][0][ + "TopicArn" + ] + } + except Exception: + raise RuntimeError( + f"ERROR: {bucket_name} was not configured with notifications" + )", + }, + "isEnd": true, + "name": "EnableBucketEventNotifications", + "outputs": [ + { + "Name": "Output", + "Selector": "$.Payload.output", + "Type": "StringMap", + }, + ], + "timeoutSeconds": 600, + }, + ], + "outputs": [ + "EnableBucketEventNotifications.Output", + ], + "parameters": { + "AccountId": { + "allowedPattern": "^[0-9]{12}$", + "description": "Account ID of the account for the finding", + "type": "String", + }, + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "BucketName": { + "allowedPattern": "(?=^.{3,63}$)(?!^(\\d+\\.)+\\d+$)(^(([a-z0-9]|[a-z0-9][a-z0-9\\-]*[a-z0-9])\\.)*([a-z0-9]|[a-z0-9][a-z0-9\\-]*[a-z0-9])$)", + "description": "(Required) The name of the S3 Bucket.", + "type": "String", + }, + "EventTypes": { + "default": [ + "s3:ReducedRedundancyLostObject", + "s3:ObjectCreated:*", + "s3:ObjectRemoved:*", + "s3:ObjectRestore:*", + "s3:Replication:*", + "s3:LifecycleExpiration:*", + 
"s3:LifecycleTransition", + "s3:IntelligentTiering", + "s3:ObjectTagging:*", + "s3:ObjectAcl:Put", + ], + "description": "(Optional) The event types to add notifications for.", + "type": "StringList", + }, + "TopicName": { + "allowedPattern": "^[a-zA-Z0-9][a-zA-Z0-9-_]{0,255}$", + "default": "SO0111-ASR-S3BucketNotifications", + "description": "(Optional) The name of the SNS topic to create and configure for notifications.", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-EnableBucketEventNotifications", + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ASREnableCloudFrontDefaultRootObject": { + "DependsOn": [ + "CreateWait8", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document name - AWSConfigRemediation-EnableCloudFrontDefaultRootObject + +## What does this document do? +This runbook configures the default root object for the Amazon CloudFront distribution you specify using the [UpdateDistribution](https://docs.aws.amazon.com/cloudfront/latest/APIReference/API_UpdateDistribution.html) API. + +## Input Parameters +* CloudFrontDistribution: (Required) The ARN of the CloudFront distribution you want to configure the default root object for. +* DefaultRootObject: (Required) The object that you want CloudFront to return when a viewer request points to your root URL. +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Output Parameters +* UpdateDistributionAndVerify.Output: The standard HTTP response from the UpdateDistribution API.", + "mainSteps": [ + { + "action": "aws:executeScript", + "description": "## UpdateDistributionAndVerify +Configures the default root object for the CloudFront distribution you specify in the CloudFrontDistributionId parameter and verifies it's successful modification. 
+## outputs +* Output: The standard HTTP response from the UpdateDistribution API. +", + "inputs": { + "Handler": "handler", + "InputPayload": { + "cloudfront_distribution": "{{ CloudFrontDistribution }}", + "root_object": "{{ DefaultRootObject }}", + }, + "Runtime": "python3.8", + "Script": "import datetime +import json + +import boto3 + + +def default(obj): + if isinstance(obj, (datetime.date, datetime.datetime)): + return obj.isoformat() + else: + raise TypeError("Incorrect HTTPResponse format.") + + +def verify_enable_cloudfront_default_root_object( + cloudfront_client, cloudfront_distribution +): + response = cloudfront_client.get_distribution_config(Id=cloudfront_distribution) + if response["DistributionConfig"]["DefaultRootObject"]: + return "Verification of 'EnableCloudFrontDefaultRootObject' is successful." + error = f"VERIFICATION FAILED. DEFAULT ROOT OBJECT FOR AMAZON CLOUDFRONT DISTRIBUTION {cloudfront_distribution} IS NOT SET." + raise RuntimeError(error) + + +def handler(event, _): + cloudfront_client = boto3.client("cloudfront") + cloudfront_distribution_arn = event["cloudfront_distribution"] + cloudfront_distribution_id = cloudfront_distribution_arn.split("/")[1] + response = cloudfront_client.get_distribution_config(Id=cloudfront_distribution_id) + response["DistributionConfig"]["DefaultRootObject"] = event["root_object"] + update_response = cloudfront_client.update_distribution( + DistributionConfig=response["DistributionConfig"], + Id=cloudfront_distribution_id, + IfMatch=response["ETag"], + ) + output = verify_enable_cloudfront_default_root_object( + cloudfront_client, cloudfront_distribution_id + ) + return { + "Output": { + "Message": output, + "HTTPResponse": json.dumps(update_response, default=default), + } + }", + }, + "isEnd": true, + "name": "UpdateDistributionAndVerify", "outputs": [ { - "Name": "AllowVersionUpgradeString", - "Selector": "$.Payload", - "Type": "String", + "Name": "Output", + "Selector": "$.Payload.Output", + "Type": 
"StringMap", }, ], - }, - { - "action": "aws:assertAwsResourceProperty", - "inputs": { - "Api": "DescribeClusters", - "ClusterIdentifier": "{{ClusterIdentifier}}", - "DesiredValues": [ - "{{CastAllowVersionUpgradeToString.AllowVersionUpgradeString}}", - ], - "PropertySelector": "$.Clusters[0].AllowVersionUpgrade", - "Service": "redshift", - }, - "isEnd": true, - "name": "VerifyAutomaticVersionUpgrade", + "timeoutSeconds": 600, }, ], "outputs": [ - "EnableAutomaticVersionUpgrade.Response", + "UpdateDistributionAndVerify.Output", ], "parameters": { - "AllowVersionUpgrade": { - "default": true, - "description": "(Optional) Whether to allow version upgrade on the cluster.", - "type": "Boolean", - }, "AutomationAssumeRole": { "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role/[\\w+=,.@-]+$", "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", "type": "String", }, - "ClusterIdentifier": { - "allowedPattern": "^(?!.*--)[a-z][a-z0-9-]{0,62}(?= 1: for existing_loggroup in log_group_verification: - if existing_loggroup['logGroupName'] == group: + if existing_loggroup["logGroupName"] == group: return 1 return 0 except Exception as e: - exit(f'EnableVPCFlowLogs failed - unhandled exception {str(e)}') + exit(f"EnableVPCFlowLogs failed - unhandled exception {str(e)}") + def wait_for_seconds(wait_interval): time.sleep(wait_interval) + def wait_for_loggroup(client, wait_interval, max_retries, loggroup): attempts = 1 while not log_group_exists(client, loggroup): wait_for_seconds(wait_interval) attempts += 1 if attempts > max_retries: - exit(f'Timeout waiting for log group {loggroup} to become active') + exit(f"Timeout waiting for log group {loggroup} to become active") + def flowlogs_active(client, loggroup): # searches for flow log status, filtered on unique CW Log Group created earlier @@ -5458,19 +6817,17 @@ def flowlogs_active(client, loggroup): flow_status = client.describe_flow_logs( DryRun=False, 
Filters=[ - { - 'Name': 'log-group-name', - 'Values': [loggroup] - }, - ] - )['FlowLogs'] - if len(flow_status) == 1 and flow_status[0]['FlowLogStatus'] == 'ACTIVE': + {"Name": "log-group-name", "Values": [loggroup]}, + ], + )["FlowLogs"] + if len(flow_status) == 1 and flow_status[0]["FlowLogStatus"] == "ACTIVE": return 1 else: return 0 except Exception as e: - exit(f'EnableVPCFlowLogs failed - unhandled exception {str(e)}') + exit(f"EnableVPCFlowLogs failed - unhandled exception {str(e)}") + def wait_for_flowlogs(client, wait_interval, max_retries, loggroup): attempts = 1 @@ -5478,7 +6835,10 @@ def wait_for_flowlogs(client, wait_interval, max_retries, loggroup): wait_for_seconds(wait_interval) attempts += 1 if attempts > max_retries: - exit(f'Timeout waiting for flowlogs to log group {loggroup} to become active') + exit( + f"Timeout waiting for flowlogs to log group {loggroup} to become active" + ) + def enable_flow_logs(event, _): """ @@ -5486,43 +6846,42 @@ def enable_flow_logs(event, _): On success returns a string map On failure returns NoneType """ - max_retries = event.get('retries', 12) # max number of waits for actions to complete. - wait_interval = event.get('wait', 5) # how many seconds between attempts + max_retries = event.get( + "retries", 12 + ) # max number of waits for actions to complete. 
+ wait_interval = event.get("wait", 5) # how many seconds between attempts - boto_config_args = { - 'retries': { - 'mode': 'standard' - } - } - - boto_config = Config(**boto_config_args) + boto_config = Config(retries={"mode": "standard"}) - if 'vpc' not in event or 'remediation_role' not in event or 'kms_key_arn' not in event: - exit('Error: missing vpc from input') + if ( + "vpc" not in event + or "remediation_role" not in event + or "kms_key_arn" not in event + ): + exit("Error: missing vpc from input") logs_client = connect_to_logs(boto_config) ec2_client = connect_to_ec2(boto_config) - kms_key_arn = event['kms_key_arn'] # for logs encryption at rest + kms_key_arn = event["kms_key_arn"] # for logs encryption at rest # set dynamic variable for CW Log Group for VPC Flow Logs - vpc_flow_loggroup = "VPCFlowLogs/" + event['vpc'] + vpc_flow_loggroup = "VPCFlowLogs/" + event["vpc"] # create cloudwatch log group try: logs_client.create_log_group( - logGroupName=vpc_flow_loggroup, - kmsKeyId=kms_key_arn + logGroupName=vpc_flow_loggroup, kmsKeyId=kms_key_arn ) except ClientError as client_error: - exception_type = client_error.response['Error']['Code'] + exception_type = client_error.response["Error"]["Code"] if exception_type in ["ResourceAlreadyExistsException"]: - print(f'CloudWatch Logs group {vpc_flow_loggroup} already exists') + print(f"CloudWatch Logs group {vpc_flow_loggroup} already exists") else: - exit(f'ERROR CREATING LOGGROUP {vpc_flow_loggroup}: {str(exception_type)}') + exit(f"ERROR CREATING LOGGROUP {vpc_flow_loggroup}: {str(exception_type)}") except Exception as e: - exit(f'ERROR CREATING LOGGROUP {vpc_flow_loggroup}: {str(e)}') + exit(f"ERROR CREATING LOGGROUP {vpc_flow_loggroup}: {str(e)}") # wait for CWL creation to propagate wait_for_loggroup(logs_client, wait_interval, max_retries, vpc_flow_loggroup) @@ -5531,27 +6890,27 @@ def enable_flow_logs(event, _): try: ec2_client.create_flow_logs( DryRun=False, - 
DeliverLogsPermissionArn=event['remediation_role'], + DeliverLogsPermissionArn=event["remediation_role"], LogGroupName=vpc_flow_loggroup, - ResourceIds=[event['vpc']], - ResourceType='VPC', - TrafficType='REJECT', - LogDestinationType='cloud-watch-logs' + ResourceIds=[event["vpc"]], + ResourceType="VPC", + TrafficType="REJECT", + LogDestinationType="cloud-watch-logs", ) except ClientError as client_error: - exception_type = client_error.response['Error']['Code'] + exception_type = client_error.response["Error"]["Code"] if exception_type in ["FlowLogAlreadyExists"]: return { "response": { "message": f'VPC Flow Logs for {event["vpc"]} already enabled', - "status": "Success" + "status": "Success", } } else: - exit(f'ERROR CREATING LOGGROUP {vpc_flow_loggroup}: {str(exception_type)}') + exit(f"ERROR CREATING LOGGROUP {vpc_flow_loggroup}: {str(exception_type)}") except Exception as e: - exit(f'create_flow_logs failed {str(e)}') + exit(f"create_flow_logs failed {str(e)}") # wait for Flow Log creation to propagate. 
Exits on timeout (no need to check results) wait_for_flowlogs(ec2_client, wait_interval, max_retries, vpc_flow_loggroup) @@ -5560,7 +6919,7 @@ def enable_flow_logs(event, _): return { "response": { "message": f'VPC Flow Logs enabled for {event["vpc"]} to {vpc_flow_loggroup}', - "status": "Success" + "status": "Success", } }", }, @@ -5799,7 +7158,7 @@ If KmsKeyId is a Customer-Managed Key (CMK), then AutomationAssumeRole must have "type": "String", }, "SourceDBSnapshotIdentifier": { - "allowedPattern": "^(?:rds:)?(?!.*--.*)(?!.*-$)[a-zA-Z][a-zA-Z0-9-]{0,254}$", + "allowedPattern": "^(?:rds:|awsbackup:)?(?!.*--.*)(?!.*-$)[a-zA-Z][a-zA-Z0-9-]{0,254}$", "description": "(Required) The name of the unencrypted RDS snapshot or cluster snapshot to copy.", "type": "String", }, @@ -5838,7 +7197,7 @@ This runbook works an the account level to remove public share on all EBS snapsh * Remediation.Output - stdout messages from the remediation ## Security Standards / Controls -* AFSBP v1.0.0: EC2.1 +* AWS FSBP v1.0.0: EC2.1 * CIS v1.2.0: n/a * PCI: EC2.1 ", @@ -5855,72 +7214,66 @@ This runbook works an the account level to remove public share on all EBS snapsh "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import json import boto3 from botocore.config import Config -from botocore.exceptions import ClientError -boto_config = Config( - retries = { - 'mode': 'standard', - 'max_attempts': 10 - } - ) +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + def connect_to_ec2(boto_config): - return boto3.client('ec2', config=boto_config) + return boto3.client("ec2", config=boto_config) + def get_public_snapshots(event, _): - account_id = event['account_id'] + account_id = event["account_id"] - if 'testmode' in event and event['testmode']: + if "testmode" in event and event["testmode"]: return [ "snap-12341234123412345", "snap-12341234123412345", "snap-12341234123412345", "snap-12341234123412345", - "snap-12341234123412345" + "snap-12341234123412345", ] return list_public_snapshots(account_id) + def list_public_snapshots(account_id): ec2 = connect_to_ec2(boto_config) - control_token = 'start' + control_token = "start" try: - public_snapshot_ids = [] while control_token: - - if control_token == 'start': # needed a value to start the loop. Now reset it - control_token = '' + if ( + control_token == "start" + ): # needed a value to start the loop. 
Now reset it + control_token = "" kwargs = { - 'MaxResults': 100, - 'OwnerIds': [ account_id ], - 'RestorableByUserIds': [ 'all' ] + "MaxResults": 100, + "OwnerIds": [account_id], + "RestorableByUserIds": ["all"], } if control_token: - kwargs['NextToken'] = control_token + kwargs["NextToken"] = control_token - response = ec2.describe_snapshots( - **kwargs - ) + response = ec2.describe_snapshots(**kwargs) - for snapshot in response['Snapshots']: - public_snapshot_ids.append(snapshot['SnapshotId']) + for snapshot in response["Snapshots"]: + public_snapshot_ids.append(snapshot["SnapshotId"]) - if 'NextToken' in response: - control_token = response['NextToken'] + if "NextToken" in response: + control_token = response["NextToken"] else: - control_token = '' + control_token = "" return public_snapshot_ids except Exception as e: print(e) - exit('Failed to describe_snapshots')", + exit("Failed to describe_snapshots")", }, "name": "GetPublicSnapshotIds", "outputs": [ @@ -5943,53 +7296,48 @@ def list_public_snapshots(account_id): "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import json + import boto3 from botocore.config import Config -from botocore.exceptions import ClientError + def connect_to_ec2(boto_config): - return boto3.client('ec2', config=boto_config) + return boto3.client("ec2", config=boto_config) + def make_snapshots_private(event, _): - boto_config = Config( - retries = { - 'mode': 'standard', - 'max_attempts': 10 - } - ) + boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) ec2 = connect_to_ec2(boto_config) remediated = [] - snapshots = event['snapshots'] + snapshots = event["snapshots"] success_count = 0 for snapshot_id in snapshots: try: ec2.modify_snapshot_attribute( - Attribute='CreateVolumePermission', - CreateVolumePermission={ - 'Remove': [{'Group': 'all'}] - }, - SnapshotId=snapshot_id + Attribute="CreateVolumePermission", + CreateVolumePermission={"Remove": [{"Group": "all"}]}, + SnapshotId=snapshot_id, ) - print(f'Snapshot {snapshot_id} permissions set to private') + print(f"Snapshot {snapshot_id} permissions set to private") remediated.append(snapshot_id) success_count += 1 except Exception as e: print(e) - print(f'FAILED to remediate Snapshot {snapshot_id}') + print(f"FAILED to remediate Snapshot {snapshot_id}") - result=json.dumps(ec2.describe_snapshots( - SnapshotIds=remediated - ), indent=2, default=str) + result = json.dumps( + ec2.describe_snapshots(SnapshotIds=remediated), indent=2, default=str + ) print(result) return { "response": { - "message": f'{success_count} of {len(snapshots)} Snapshot permissions set to private', - "status": "Success" + "message": f"{success_count} of {len(snapshots)} Snapshot permissions set to private", + "status": "Success", } }", }, @@ -6054,7 +7402,7 @@ This runbook removes public access to an RDS Snapshot * Remediation.Output - stdout messages from the remediation ## Security Standards / Controls -* AFSBP v1.0.0: RDS.1 +* AWS FSBP v1.0.0: RDS.1 * CIS v1.2.0: n/a * PCI: RDS.1 ", @@ -6070,62 +7418,199 @@ This runbook 
removes public access to an RDS Snapshot "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import json import boto3 from botocore.config import Config -from botocore.exceptions import ClientError -def connect_to_rds(): - boto_config = Config( - retries ={ - 'mode': 'standard' - } + +def connect_to_rds(): + boto_config = Config(retries={"mode": "standard"}) + return boto3.client("rds", config=boto_config) + + +def make_snapshot_private(event, _): + rds_client = connect_to_rds() + snapshot_id = event["DBSnapshotId"] + snapshot_type = event["DBSnapshotType"] + try: + if snapshot_type == "snapshot": + rds_client.modify_db_snapshot_attribute( + DBSnapshotIdentifier=snapshot_id, + AttributeName="restore", + ValuesToRemove=["all"], + ) + elif snapshot_type == "cluster-snapshot": + rds_client.modify_db_cluster_snapshot_attribute( + DBClusterSnapshotIdentifier=snapshot_id, + AttributeName="restore", + ValuesToRemove=["all"], + ) + else: + exit(f"Unrecognized snapshot_type {snapshot_type}") + + print(f"Remediation completed: {snapshot_id} public access removed.") + return { + "response": { + "message": f"Snapshot {snapshot_id} permissions set to private", + "status": "Success", + } + } + except Exception as e: + exit(f"Remediation failed for {snapshot_id}: {str(e)}")", + }, + "name": "MakeRDSSnapshotPrivate", + "outputs": [ + { + "Name": "Output", + "Selector": "$.Payload.response", + "Type": "StringMap", + }, + ], + }, + ], + "outputs": [ + "MakeRDSSnapshotPrivate.Output", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "DBSnapshotId": { + "allowedPattern": "^[a-zA-Z](?:[0-9a-zA-Z]+[-]{1})*[0-9a-zA-Z]{1,}$", + "type": "String", + }, + "DBSnapshotType": { + 
"allowedValues": [ + "cluster-snapshot", + "snapshot", + ], + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-MakeRDSSnapshotPrivate", + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ASRRemoveCodeBuildPrivilegedMode": { + "DependsOn": [ + "CreateWait8", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document Name - ASR-RemoveCodeBuildPrivilegedMode + +## What does this document do? +This document removes CodeBuild project privileged mode to remove a build project's Docker container access to all devices. + +## Input Parameters +* ProjectName: (Required) Name of the CodeBuild project (not the ARN). +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. + +## Security Standards / Controls +* AWS FSBP v1.0.0: CodeBuild.5 +* NIST 800-53 Rev5: CodeBuild.5 + +## Output Parameters +* RemoveCodeBuildPrivilegedMode.Output +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "lambda_handler", + "InputPayload": { + "project_name": "{{ProjectName}}", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import boto3 +from botocore.config import Config + +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + + +def connect_to_codebuild(): + return boto3.client("codebuild", config=boto_config) + + +def lambda_handler(event, _): + """ + Removes CodeBuild privileged mode from a project. + + \`event\` should have the following keys and values: + \`project_name\`: the name of the codebuild project with privileged mode enabled. 
+ + \`context\` is ignored + """ + project_name = event["project_name"] + + project_attributes = get_project_info(project_name) + + initial_environment = project_attributes["projects"][0]["environment"] + + initial_environment["privilegedMode"] = False + + remove_privileged_mode(project_name, initial_environment) + + updated_project_attributes = get_project_info(project_name) + + privileged_status = updated_project_attributes["projects"][0]["environment"][ + "privilegedMode" + ] + + if privileged_status is False: + return {"privilegedMode": privileged_status} + + raise RuntimeError( + f"ASR Remediation failed - {project_name} did not have privileged mode removed from project." ) - return boto3.client('rds', config=boto_config) -def make_snapshot_private(event, _): - rds_client = connect_to_rds() - snapshot_id = event['DBSnapshotId'] - snapshot_type = event['DBSnapshotType'] +def remove_privileged_mode(project_name, environment): + """ + Removes privileged_status from CodeBuild Project + """ + codebuild = connect_to_codebuild() try: - if (snapshot_type == 'snapshot'): - rds_client.modify_db_snapshot_attribute( - DBSnapshotIdentifier=snapshot_id, - AttributeName='restore', - ValuesToRemove=['all'] - ) - elif (snapshot_type == 'cluster-snapshot'): - rds_client.modify_db_cluster_snapshot_attribute( - DBClusterSnapshotIdentifier=snapshot_id, - AttributeName='restore', - ValuesToRemove=['all'] - ) - else: - exit(f'Unrecognized snapshot_type {snapshot_type}') + codebuild.update_project(name=project_name, environment=environment) + + except Exception as e: + exit("There was an error updating codebuild project: " + str(e)) + + +def get_project_info(project_name): + """ + Gets CodeBuild Project info + """ + codebuild = connect_to_codebuild() + try: + project_attributes = codebuild.batch_get_projects(names=[project_name]) + return project_attributes - print(f'Remediation completed: {snapshot_id} public access removed.') - return { - "response": { - "message": f'Snapshot 
{snapshot_id} permissions set to private', - "status": "Success" - } - } except Exception as e: - exit(f'Remediation failed for {snapshot_id}: {str(e)}')", + exit("Failed to get attributes of project: " + str(e))", }, - "name": "MakeRDSSnapshotPrivate", + "name": "RemoveCodeBuildPrivilegedMode", "outputs": [ { "Name": "Output", - "Selector": "$.Payload.response", + "Selector": "$.Payload", "Type": "StringMap", }, ], + "timeoutSeconds": 600, }, ], "outputs": [ - "MakeRDSSnapshotPrivate.Output", + "RemoveCodeBuildPrivilegedMode.Output", ], "parameters": { "AutomationAssumeRole": { @@ -6133,15 +7618,9 @@ def make_snapshot_private(event, _): "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", "type": "String", }, - "DBSnapshotId": { - "allowedPattern": "^[a-zA-Z](?:[0-9a-zA-Z]+[-]{1})*[0-9a-zA-Z]{1,}$", - "type": "String", - }, - "DBSnapshotType": { - "allowedValues": [ - "cluster-snapshot", - "snapshot", - ], + "ProjectName": { + "allowedPattern": "^[A-Za-z0-9][A-Za-z0-9\\-_]{1,254}$", + "description": "(Required) The project name (not the ARN).", "type": "String", }, }, @@ -6149,7 +7628,7 @@ def make_snapshot_private(event, _): }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-MakeRDSSnapshotPrivate", + "Name": "ASR-RemoveCodeBuildPrivilegedMode", "UpdateMethod": "NewVersion", }, "Type": "AWS::SSM::Document", @@ -6177,9 +7656,9 @@ function. The remediation is to remove the SID of the public policy. * RemoveLambdaPublicAccess.Output - stdout messages from the remediation ## Security Standards / Controls -* AFSBP v1.0.0: Lambda.1 -* CIS v1.2.0: n/a -* PCI: Lambda.1 +* AWS FSBP v1.0.0: Lambda.1 +* CIS v1.2.0: n/a +* PCI: Lambda.1 ", "mainSteps": [ { @@ -6193,83 +7672,96 @@ function. The remediation is to remove the SID of the public policy. "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import json + import boto3 from botocore.config import Config from botocore.exceptions import ClientError -boto_config = Config( - retries = { - 'mode': 'standard', - 'max_attempts': 10 - } -) +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + def connect_to_lambda(boto_config): - return boto3.client('lambda', config=boto_config) + return boto3.client("lambda", config=boto_config) + def print_policy_before(policy): - print('Resource Policy to be deleted:') + print("Resource Policy to be deleted:") print(json.dumps(policy, indent=2, default=str)) + def public_s3_statement_check(statement, principal): """ This function checks if the user has given access to an S3 bucket without providing an AWS account. """ try: empty_source_account_check = False - if ("StringEquals" in statement["Condition"]): - empty_source_account_check = ("AWS:SourceAccount" not in statement["Condition"]["StringEquals"]) + if "StringEquals" in statement["Condition"]: + empty_source_account_check = ( + "AWS:SourceAccount" not in statement["Condition"]["StringEquals"] + ) else: empty_source_account_check = True - return principal.get("Service", "") == "s3.amazonaws.com" and empty_source_account_check + return ( + principal.get("Service", "") == "s3.amazonaws.com" + and empty_source_account_check + ) except KeyError: return principal.get("Service", "") == "s3.amazonaws.com" + def remove_resource_policy(functionname, sid, client): try: - client.remove_permission( - FunctionName=functionname, - StatementId=sid - ) - print(f'SID {sid} removed from Lambda function {functionname}') + client.remove_permission(FunctionName=functionname, StatementId=sid) + print(f"SID {sid} removed from Lambda function {functionname}") except Exception as e: - exit(f'FAILED: SID {sid} was NOT removed from Lambda function {functionname} - {str(e)}') + exit( + f"FAILED: SID {sid} was NOT removed from Lambda function {functionname} - {str(e)}" + ) + def 
remove_public_statement(client, functionname, statement, principal): - if principal == "*" or (isinstance(principal, dict) and (principal.get("AWS","") == "*" or public_s3_statement_check(statement, principal))): + if principal == "*" or ( + isinstance(principal, dict) + and ( + principal.get("AWS", "") == "*" + or public_s3_statement_check(statement, principal) + ) + ): print_policy_before(statement) - remove_resource_policy(functionname, statement['Sid'], client) + remove_resource_policy(functionname, statement["Sid"], client) -def remove_lambda_public_access(event, _): +def remove_lambda_public_access(event, _): client = connect_to_lambda(boto_config) - functionname = event['FunctionName'] + functionname = event["FunctionName"] try: response = client.get_policy(FunctionName=functionname) - policy = response['Policy'] + policy = response["Policy"] policy_json = json.loads(policy) - statements = policy_json['Statement'] + statements = policy_json["Statement"] - print('Scanning for public resource policies in ' + functionname) + print("Scanning for public resource policies in " + functionname) for statement in statements: - remove_public_statement(client, functionname, statement, statement['Principal']) + remove_public_statement( + client, functionname, statement, statement["Principal"] + ) client.get_policy(FunctionName=functionname) verify(functionname) except ClientError as ex: - exception_type = ex.response['Error']['Code'] - if exception_type in ['ResourceNotFoundException']: + exception_type = ex.response["Error"]["Code"] + if exception_type in ["ResourceNotFoundException"]: print("Remediation completed. 
Resource policy is now empty.") else: - exit(f'ERROR: Remediation failed for RemoveLambdaPublicAccess: {str(ex)}') + exit(f"ERROR: Remediation failed for RemoveLambdaPublicAccess: {str(ex)}") except Exception as e: - exit(f'ERROR: Remediation failed for RemoveLambdaPublicAccess: {str(e)}') + exit(f"ERROR: Remediation failed for RemoveLambdaPublicAccess: {str(e)}") -def verify(function_name_to_check): +def verify(function_name_to_check): client = connect_to_lambda(boto_config) try: @@ -6279,13 +7771,13 @@ def verify(function_name_to_check): print(json.dumps(response, indent=2, default=str)) except ClientError as ex: - exception_type = ex.response['Error']['Code'] - if exception_type in ['ResourceNotFoundException']: + exception_type = ex.response["Error"]["Code"] + if exception_type in ["ResourceNotFoundException"]: print("Remediation completed. Resource policy is now empty.") else: - exit(f'ERROR: {exception_type} on get_policy') + exit(f"ERROR: {exception_type} on get_policy") except Exception as e: - exit(f'Exception while retrieving lambda function policy: {str(e)}')", + exit(f"Exception while retrieving lambda function policy: {str(e)}")", }, "name": "RemoveLambdaPublicAccess", "outputs": [ @@ -6320,6 +7812,133 @@ def verify(function_name_to_check): }, "Type": "AWS::SSM::Document", }, + "ASRRemoveUnusedSecret": { + "DependsOn": [ + "CreateWait11", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document name - ASR-RemoveUnusedSecret + +## What does this document do? + This document deletes a secret that has been unused for the number of days specified in the unusedForDays parameter (Default: 90 days). + There is a 30 day period to recover the secret after it is deleted. + [DeleteSecret](https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_DeleteSecret.html) API. 
+ + +## Input Parameters +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +* SecretARN: (Required) The ARN of the Secrets Manager secret. +* UnusedForDays: (Optional) Maximum number of days that a secret can remain unused. + +## Security Standards / Controls +* AFSBP v1.0.0: SecretsManager.3 +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "lambda_handler", + "InputPayload": { + "SecretARN": "{{ SecretARN }}", + "UnusedForDays": "{{ UnusedForDays }}", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +from datetime import datetime, timezone + +import boto3 +from botocore.config import Config + +BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + +# Current date in the same format SecretsManager tracks LastAccessedDate +DATE_TODAY = datetime.now().replace( + hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc +) + + +def connect_to_secretsmanager(): + return boto3.client("secretsmanager", config=BOTO_CONFIG) + + +def lambda_handler(event, _): + secret_arn = event["SecretARN"] + unused_for_days = event["UnusedForDays"] + + secretsmanager = connect_to_secretsmanager() + + # Describe the secret + response = secretsmanager.describe_secret(SecretId=secret_arn) + + # Confirm the secret has been unused for more days than UnusedForDays parameter specifies + if "LastAccessedDate" in response and ( + DATE_TODAY - response["LastAccessedDate"] + ).days > int(unused_for_days): + # Delete the secret, with 30 day recovery window + response = secretsmanager.delete_secret( + SecretId=secret_arn, + RecoveryWindowInDays=30, + ) + + # Confirm secret was scheduled for deletion + if "DeletionDate" in response: + return { + "message": "Deleted the unused secret.", + "status": "Success", + } + else: + exit(f"Failed to delete the unused secret: 
{secret_arn}") + + exit( + f"The secret {secret_arn} cannot be deleted because it has been accessed within the past {unused_for_days} days." + )", + }, + "maxAttempts": 3, + "name": "RemoveUnusedSecret", + "outputs": [ + { + "Name": "Output", + "Selector": "$.Payload", + "Type": "StringMap", + }, + ], + "timeoutSeconds": 600, + }, + ], + "outputs": [ + "RemoveUnusedSecret.Output", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "SecretARN": { + "allowedPattern": "^arn:(?:aws|aws-cn|aws-us-gov):secretsmanager:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:secret:([A-Za-z0-9\\/_+=.@-]+)$", + "description": "(Required) The ARN of the Secrets Manager secret.", + "type": "String", + }, + "UnusedForDays": { + "allowedPattern": "^\\d{0,3}$", + "default": 90, + "description": "(Optional) Maximum number of days that a secret can remain unused.", + "type": "Integer", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-RemoveUnusedSecret", + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, "ASRRemoveVPCDefaultSecurityGroupRules": { "DependsOn": [ "CreateWait6", @@ -6507,115 +8126,128 @@ Returns new project environment variables and SSM parameter information (without "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 +import re from json import dumps + from boto3 import client from botocore.config import Config from botocore.exceptions import ClientError -import re -boto_config = Config(retries = {'mode': 'standard'}) +boto_config = Config(retries={"mode": "standard"}) + +CREDENTIAL_NAMES_UPPER = ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"] -CREDENTIAL_NAMES_UPPER = [ - 'AWS_ACCESS_KEY_ID', - 'AWS_SECRET_ACCESS_KEY' -] def connect_to_ssm(boto_config): - return client('ssm', config = boto_config) + return client("ssm", config=boto_config) + def connect_to_iam(boto_config): - return client('iam', config = boto_config) + return client("iam", config=boto_config) + def is_clear_text_credential(env_var): - if env_var.get('type') != 'PLAINTEXT': + if env_var.get("type") != "PLAINTEXT": return False - return any(env_var.get('name').upper() == credential_name for credential_name in CREDENTIAL_NAMES_UPPER) + return any( + env_var.get("name").upper() == credential_name + for credential_name in CREDENTIAL_NAMES_UPPER + ) + def get_project_ssm_namespace(project_name): - return f'/CodeBuild/{ project_name }' + return f"/CodeBuild/{ project_name }" + def create_parameter(project_name, env_var): - env_var_name = env_var.get('name') - parameter_name = f'{ get_project_ssm_namespace(project_name) }/env/{ env_var_name }' + env_var_name = env_var.get("name") + parameter_name = f"{ get_project_ssm_namespace(project_name) }/env/{ env_var_name }" ssm_client = connect_to_ssm(boto_config) try: response = ssm_client.put_parameter( - Name = parameter_name, - Description = 'Automatically created by ASR', - Value = env_var.get("value"), - Type = 'SecureString', - Overwrite = False, - DataType = 'text' + Name=parameter_name, + Description="Automatically created by ASR", + Value=env_var.get("value"), + Type="SecureString", + Overwrite=False, + DataType="text", ) except ClientError as client_exception: - exception_type = client_exception.response['Error']['Code'] - if 
exception_type == 'ParameterAlreadyExists': - print(f'Parameter { parameter_name } already exists. This remediation may have been run before.') - print('Ignoring exception - remediation continues.') + exception_type = client_exception.response["Error"]["Code"] + if exception_type == "ParameterAlreadyExists": + print( + f"Parameter { parameter_name } already exists. This remediation may have been run before." + ) + print("Ignoring exception - remediation continues.") response = None else: - exit(f'ERROR: Unhandled client exception: { client_exception }') + exit(f"ERROR: Unhandled client exception: { client_exception }") except Exception as e: - exit(f'ERROR: could not create SSM parameter { parameter_name }: { str(e) }') + exit(f"ERROR: could not create SSM parameter { parameter_name }: { str(e) }") return response, parameter_name + def create_policy(region, account, partition, project_name): iam_client = connect_to_iam(boto_config) - policy_resource_filter = f'arn:{ partition }:ssm:{ region }:{ account }:parameter{ get_project_ssm_namespace(project_name) }/*' + policy_resource_filter = f"arn:{ partition }:ssm:{ region }:{ account }:parameter{ get_project_ssm_namespace(project_name) }/*" policy_document = { - 'Version': '2012-10-17', - 'Statement': [ + "Version": "2012-10-17", + "Statement": [ { - 'Effect': 'Allow', - 'Action': [ - 'ssm:GetParameter', - 'ssm:GetParameters' - ], - 'Resource': policy_resource_filter + "Effect": "Allow", + "Action": ["ssm:GetParameter", "ssm:GetParameters"], + "Resource": policy_resource_filter, } - ] + ], } - policy_name = f'CodeBuildSSMParameterPolicy-{ project_name }-{ region }' + policy_name = f"CodeBuildSSMParameterPolicy-{ project_name }-{ region }" try: response = iam_client.create_policy( - Description = "Automatically created by ASR", - PolicyDocument = dumps(policy_document), - PolicyName = policy_name + Description="Automatically created by ASR", + PolicyDocument=dumps(policy_document), + PolicyName=policy_name, ) except 
ClientError as client_exception: - exception_type = client_exception.response['Error']['Code'] - if exception_type == 'EntityAlreadyExists': - print(f'Policy { "" } already exists. This remediation may have been run before.') - print('Ignoring exception - remediation continues.') + exception_type = client_exception.response["Error"]["Code"] + if exception_type == "EntityAlreadyExists": + print( + f'Policy { "" } already exists. This remediation may have been run before.' + ) + print("Ignoring exception - remediation continues.") # Attach needs to know the ARN of the created policy response = { - 'Policy': { - 'Arn': f'arn:{ partition }:iam::{ account }:policy/{ policy_name }' + "Policy": { + "Arn": f"arn:{ partition }:iam::{ account }:policy/{ policy_name }" } } else: - exit(f'ERROR: Unhandled client exception: { client_exception }') + exit(f"ERROR: Unhandled client exception: { client_exception }") except Exception as e: - exit(f'ERROR: could not create access policy { policy_name }: { str(e) }') + exit(f"ERROR: could not create access policy { policy_name }: { str(e) }") return response + def attach_policy(policy_arn, service_role_name): iam_client = connect_to_iam(boto_config) try: response = iam_client.attach_role_policy( - PolicyArn = policy_arn, - RoleName = service_role_name + PolicyArn=policy_arn, RoleName=service_role_name ) except ClientError as client_exception: - exit(f'ERROR: Unhandled client exception: { client_exception }') + exit(f"ERROR: Unhandled client exception: { client_exception }") except Exception as e: - exit(f'ERROR: could not attach policy { policy_arn } to role { service_role_name }: { str(e) }') + exit( + f"ERROR: could not attach policy { policy_arn } to role { service_role_name }: { str(e) }" + ) return response + def parse_project_arn(arn): - pattern = re.compile(r'arn:(aws[a-zA-Z-]*):codebuild:([a-z]{2}(?:-gov)?-[a-z]+-\\d):(\\d{12}):project/[A-Za-z0-9][A-Za-z0-9\\-_]{1,254}$') + pattern = re.compile( + 
r"arn:(aws[a-zA-Z-]*):codebuild:([a-z]{2}(?:-gov)?-[a-z]+-\\d):(\\d{12}):project/[A-Za-z0-9][A-Za-z0-9\\-_]{1,254}$" + ) match = pattern.match(arn) if match: partition = match.group(1) @@ -6625,21 +8257,22 @@ def parse_project_arn(arn): else: raise ValueError + def replace_credentials(event, _): - project_info = event.get('ProjectInfo') - project_name = project_info.get('name') - project_env = project_info.get('environment') - project_env_vars = project_env.get('environmentVariables') + project_info = event.get("ProjectInfo") + project_name = project_info.get("name") + project_env = project_info.get("environment") + project_env_vars = project_env.get("environmentVariables") updated_project_env_vars = [] parameters = [] for env_var in project_env_vars: - if (is_clear_text_credential(env_var)): + if is_clear_text_credential(env_var): parameter_response, parameter_name = create_parameter(project_name, env_var) updated_env_var = { - 'name': env_var.get('name'), - 'type': 'PARAMETER_STORE', - 'value': parameter_name + "name": env_var.get("name"), + "type": "PARAMETER_STORE", + "value": parameter_name, } updated_project_env_vars.append(updated_env_var) parameters.append(parameter_response) @@ -6647,25 +8280,25 @@ def replace_credentials(event, _): updated_project_env_vars.append(env_var) updated_project_env = project_env - updated_project_env['environmentVariables'] = updated_project_env_vars + updated_project_env["environmentVariables"] = updated_project_env_vars - partition, region, account = parse_project_arn(project_info.get('arn')) + partition, region, account = parse_project_arn(project_info.get("arn")) policy = create_policy(region, account, partition, project_name) - service_role_arn = project_info.get('serviceRole') - service_role_name = service_role_arn[service_role_arn.rfind('/') + 1:] - attach_response = attach_policy(policy['Policy']['Arn'], service_role_name) + service_role_arn = project_info.get("serviceRole") + service_role_name = 
service_role_arn[service_role_arn.rfind("/") + 1 :] + attach_response = attach_policy(policy["Policy"]["Arn"], service_role_name) # datetimes are not serializable, so convert them to ISO 8601 strings - policy_datetime_keys = ['CreateDate', 'UpdateDate'] + policy_datetime_keys = ["CreateDate", "UpdateDate"] for key in policy_datetime_keys: - if key in policy['Policy']: - policy['Policy'][key] = policy['Policy'][key].isoformat() + if key in policy["Policy"]: + policy["Policy"][key] = policy["Policy"][key].isoformat() return { - 'UpdatedProjectEnv': updated_project_env, - 'Parameters': parameters, - 'Policy': policy, - 'AttachResponse': attach_response + "UpdatedProjectEnv": updated_project_env, + "Parameters": parameters, + "Policy": policy, + "AttachResponse": attach_response, }", }, "isCritical": true, @@ -6712,7 +8345,178 @@ Changes the settings of a build project. "outputs": [ { "Name": "Output", - "Selector": "$.Payload.output", + "Selector": "$.Payload.output", + "Type": "StringMap", + }, + ], + "timeoutSeconds": 600, + }, + ], + "outputs": [ + "CreateParameters.Parameters", + "CreateParameters.Policy", + "CreateParameters.AttachResponse", + "UpdateProject.Output", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "ProjectName": { + "allowedPattern": "^[A-Za-z0-9][A-Za-z0-9\\-_]{1,254}$", + "description": "(Required) The project name (not the ARN).", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-ReplaceCodeBuildClearTextCredentials", + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ASRRevokeUnauthorizedInboundRules": { + "DependsOn": [ + "CreateWait11", + ], + "Properties": { + "Content": { + "assumeRole": "{{ 
AutomationAssumeRole }}", + "description": "### Document name - ASR-RevokeUnauthorizedInboundRules + +## What does this document do? +This document revokes inbound security group rules that allow unrestricted access to ports that are not authorized. +Authorized ports are listed in authorizedTcpPorts and authorizedUdpPorts parameters. + +## Input Parameters +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +* SecurityGroupId: (Required) The ID of the Seurity Group. +* AuthorizedTcpPorts: (Optional) List of TCP ports authorized to be open to 0.0.0.0/0 or ::/0. +* AuthorizedUdpPorts: (Optional) List of UDP ports authorized to be open to 0.0.0.0/0 or ::/0. + +## Security Standards / Controls +* AFSBP v1.0.0: EC2.18 +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "lambda_handler", + "InputPayload": { + "AuthorizedTcpPorts": "{{ AuthorizedTcpPorts }}", + "AuthorizedUdpPorts": "{{ AuthorizedUdpPorts }}", + "SecurityGroupId": "{{ SecurityGroupId }}", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import boto3 +from botocore.config import Config + +BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + +# IPV4 and IPV6 open access +OPENIPV4 = "0.0.0.0/0" +OPENIPV6 = "::/0" + + +def connect_to_ec2(): + return boto3.client("ec2", config=BOTO_CONFIG) + + +# Function to check if rule has open access to unauthorized ports +def check_unauthorized_ports(authorized_ports, rule): + for port in range(rule["FromPort"], rule["ToPort"] + 1): + if port not in authorized_ports: + # Check for IPV4 open access + if "CidrIpv4" in rule and rule["CidrIpv4"] == OPENIPV4: + # Return True if rule has open access to unauthorized ports + return True + + # Check for IPV6 open access + elif "CidrIpv6" in rule and rule["CidrIpv6"] == OPENIPV6: + # Return True if rule is removed + return True + + # Return False if rule does not have open access to unauthorized ports + return False + + +def lambda_handler(event, _): + # Extract Security Group ID from event + security_group_id = event["SecurityGroupId"] + authorized_tcp_ports = set(map(int, event["AuthorizedTcpPorts"])) + authorized_udp_ports = set(map(int, event["AuthorizedUdpPorts"])) + + # Connect to EC2 service + ec2 = connect_to_ec2() + + # Get the security group rules + paginator = ec2.get_paginator("describe_security_group_rules") + + security_group_rules = paginator.paginate( + Filters=[ + { + "Name": "group-id", + "Values": [ + security_group_id, + ], + }, + ], + ) + + # List to return rules that are deleted + rules_deleted = [] + + for page in security_group_rules: + for rule in page["SecurityGroupRules"]: + # Remove TCP ingress rules + if ( + rule["IpProtocol"] == "tcp" + and not rule["IsEgress"] + and check_unauthorized_ports(authorized_tcp_ports, rule) + ): + # Delete the rule + ec2.revoke_security_group_ingress( + GroupId=security_group_id, + SecurityGroupRuleIds=[ + rule["SecurityGroupRuleId"], + ], + ) + # Add rule to list of deleted rules + 
rules_deleted.append(rule["SecurityGroupRuleId"]) + # Remove UDP ingress rules + if ( + rule["IpProtocol"] == "udp" + and not rule["IsEgress"] + and check_unauthorized_ports(authorized_udp_ports, rule) + ): + # Delete the rule + ec2.revoke_security_group_ingress( + GroupId=security_group_id, + SecurityGroupRuleIds=[ + rule["SecurityGroupRuleId"], + ], + ) + # Add rule to list of deleted rules + rules_deleted.append(rule["SecurityGroupRuleId"]) + + return { + "message": "Successfully removed security group rules on " + security_group_id, + "status": "Success", + "rules_deleted": rules_deleted, + }", + }, + "maxAttempts": 3, + "name": "RevokeUnauthorizedInboundRules", + "outputs": [ + { + "Name": "Output", + "Selector": "$.Payload", "Type": "StringMap", }, ], @@ -6720,20 +8524,30 @@ Changes the settings of a build project. }, ], "outputs": [ - "CreateParameters.Parameters", - "CreateParameters.Policy", - "CreateParameters.AttachResponse", - "UpdateProject.Output", + "RevokeUnauthorizedInboundRules.Output", ], "parameters": { + "AuthorizedTcpPorts": { + "default": [ + "80", + "443", + ], + "description": "(Optional) List of TCP ports authorized to be open to 0.0.0.0/0 or ::/0.", + "type": "StringList", + }, + "AuthorizedUdpPorts": { + "default": [], + "description": "(Optional) List of UDP ports authorized to be open to 0.0.0.0/0 or ::/0.", + "type": "StringList", + }, "AutomationAssumeRole": { "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role/[\\w+=,.@-]+$", "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", "type": "String", }, - "ProjectName": { - "allowedPattern": "^[A-Za-z0-9][A-Za-z0-9\\-_]{1,254}$", - "description": "(Required) The project name (not the ARN).", + "SecurityGroupId": { + "allowedPattern": "^sg-[a-z0-9\\-]+$", + "description": "(Required) The ID of the Seurity Group.", "type": "String", }, }, @@ -6741,7 +8555,7 @@ Changes the settings of a build project. 
}, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-ReplaceCodeBuildClearTextCredentials", + "Name": "ASR-RevokeUnauthorizedInboundRules", "UpdateMethod": "NewVersion", }, "Type": "AWS::SSM::Document", @@ -6784,55 +8598,74 @@ This step deactivates IAM user access keys that have not been rotated in more th "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -from datetime import datetime, timezone, timedelta +from datetime import datetime, timezone +from typing import TYPE_CHECKING, Dict, List, Literal, TypedDict + import boto3 from botocore.config import Config -boto_config = Config( - retries ={ - 'mode': 'standard' - } -) +if TYPE_CHECKING: + from mypy_boto3_iam.type_defs import EmptyResponseMetadataTypeDef +else: + EmptyResponseMetadataTypeDef = object -responses = {} +boto_config = Config(retries={"mode": "standard"}) + + +class Response(TypedDict): + AccessKeyId: str + Response: EmptyResponseMetadataTypeDef + + +responses: Dict[Literal["DeactivateUnusedKeysResponse"], List[Response]] = {} responses["DeactivateUnusedKeysResponse"] = [] + def connect_to_iam(boto_config): - return boto3.client('iam', config=boto_config) + return boto3.client("iam", config=boto_config) + def connect_to_config(boto_config): - return boto3.client('config', config=boto_config) + return boto3.client("config", config=boto_config) + def get_user_name(resource_id): config_client = connect_to_config(boto_config) list_discovered_resources_response = config_client.list_discovered_resources( - resourceType='AWS::IAM::User', - resourceIds=[resource_id] + resourceType="AWS::IAM::User", resourceIds=[resource_id] ) - resource_name = list_discovered_resources_response.get("resourceIdentifiers")[0].get("resourceName") + resource_name = list_discovered_resources_response.get("resourceIdentifiers")[ + 0 + ].get("resourceName") return resource_name + def list_access_keys(user_name, 
include_inactive=False): iam_client = connect_to_iam(boto_config) active_keys = [] keys = iam_client.list_access_keys(UserName=user_name).get("AccessKeyMetadata", []) for key in keys: - if include_inactive or key.get('Status') == 'Active': + if include_inactive or key.get("Status") == "Active": active_keys.append(key) return active_keys + def deactivate_unused_keys(access_keys, max_credential_usage_age, user_name): iam_client = connect_to_iam(boto_config) for key in access_keys: print(key) - last_used = iam_client.get_access_key_last_used(AccessKeyId=key.get("AccessKeyId")).get("AccessKeyLastUsed") + last_used = iam_client.get_access_key_last_used( + AccessKeyId=key.get("AccessKeyId") + ).get("AccessKeyLastUsed") deactivate = False now = datetime.now(timezone.utc) days_since_creation = (now - key.get("CreateDate")).days last_used_days = (now - last_used.get("LastUsedDate", now)).days - print(f'Key {key.get("AccessKeyId")} is {days_since_creation} days old and last used {last_used_days} days ago') + print( + f'Key {key.get("AccessKeyId")} is {days_since_creation} days old and last used {last_used_days} days ago' + ) if days_since_creation > max_credential_usage_age: deactivate = True @@ -6843,23 +8676,42 @@ def deactivate_unused_keys(access_keys, max_credential_usage_age, user_name): if deactivate: deactivate_key(user_name, key.get("AccessKeyId")) + def deactivate_key(user_name, access_key): iam_client = connect_to_iam(boto_config) - responses["DeactivateUnusedKeysResponse"].append({"AccessKeyId": access_key, "Response": iam_client.update_access_key(UserName=user_name, AccessKeyId=access_key, Status="Inactive")}) + responses["DeactivateUnusedKeysResponse"].append( + { + "AccessKeyId": access_key, + "Response": iam_client.update_access_key( + UserName=user_name, AccessKeyId=access_key, Status="Inactive" + ), + } + ) + def verify_expired_credentials_revoked(responses, user_name): if responses.get("DeactivateUnusedKeysResponse"): for key in 
responses.get("DeactivateUnusedKeysResponse"): - key_data = next(filter(lambda x: x.get("AccessKeyId") == key.get("AccessKeyId"), list_access_keys(user_name, True))) #NOSONAR The value key should change at the next loop iteration as we're cycling through each response. + key_data = next( + filter( + lambda x: x.get("AccessKeyId") == key.get("AccessKeyId"), + list_access_keys(user_name, True), + ) + ) # NOSONAR The value key should change at the next loop iteration as we're cycling through each response. if key_data.get("Status") != "Inactive": - error_message = "VERIFICATION FAILED. ACCESS KEY {} NOT DEACTIVATED".format(key_data.get("AccessKeyId")) + error_message = ( + "VERIFICATION FAILED. ACCESS KEY {} NOT DEACTIVATED".format( + key_data.get("AccessKeyId") + ) + ) raise RuntimeError(error_message) return { "output": "Verification of unrotated access keys is successful.", - "http_responses": responses + "http_responses": responses, } + def unrotated_key_handler(event, _): user_name = get_user_name(event.get("IAMResourceId")) max_credential_usage_age = int(event.get("MaxCredentialUsageAge")) @@ -7111,7 +8963,7 @@ Adds an explicit deny to the bucket policy for specific restricted permissions. "Runtime": "python3.8", "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -''' +""" Given a bucket name and list of "sensitive" IAM permissions that shall not be allowed cross-account, create an explicit deny policy for all cross-account principals, denying access to all IAM permissions in the deny list for all @@ -7119,25 +8971,29 @@ resources. 
Note: - The deny list is a comma-separated list configured on the Config rule in parameter blacklistedActionPattern -''' +""" +import copy import json +from typing import Any, Dict + import boto3 -import copy from botocore.config import Config -from botocore.exceptions import ClientError -BOTO_CONFIG = Config( - retries = { - 'mode': 'standard', - 'max_attempts': 10 - } - ) +BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + def connect_to_s3(): - return boto3.client('s3', config=BOTO_CONFIG) + return boto3.client("s3", config=BOTO_CONFIG) + def get_partition(): - return boto3.client('sts', config=BOTO_CONFIG).get_caller_identity().get('Arn').split(':')[1] + return ( + boto3.client("sts", config=BOTO_CONFIG) + .get_caller_identity() + .get("Arn") + .split(":")[1] + ) + class BucketToRemediate: def __init__(self, bucket_name): @@ -7155,73 +9011,92 @@ class BucketToRemediate: self.partition = get_partition() def set_account_id_from_event(self, event): - self.account_id = event.get('accountid') or exit('AWS Account not specified') + self.account_id = event.get("accountid") or exit("AWS Account not specified") def set_denylist_from_event(self, event): - self.denylist = event.get('denylist').split(',') or exit('DenyList is empty or not a comma-delimited string') # Expect a comma seperated list in a string + self.denylist = event.get("denylist").split(",") or exit( + "DenyList is empty or not a comma-delimited string" + ) # Expect a comma seperated list in a string def get_current_bucket_policy(self): try: - self.bucket_policy = connect_to_s3().get_bucket_policy( - Bucket=self.bucket_name, - ExpectedBucketOwner=self.account_id - ).get('Policy') + self.bucket_policy = ( + connect_to_s3() + .get_bucket_policy( + Bucket=self.bucket_name, ExpectedBucketOwner=self.account_id + ) + .get("Policy") + ) except Exception as e: print(e) - exit(f'Failed to retrieve the bucket policy: {self.account_id} {self.bucket_name}') + exit( + f"Failed to retrieve the 
bucket policy: {self.account_id} {self.bucket_name}" + ) def update_bucket_policy(self): try: connect_to_s3().put_bucket_policy( Bucket=self.bucket_name, ExpectedBucketOwner=self.account_id, - Policy=self.bucket_policy + Policy=self.bucket_policy, ) except Exception as e: print(e) - exit(f'Failed to store the new bucket policy: {self.account_id} {self.bucket_name}') + exit( + f"Failed to store the new bucket policy: {self.account_id} {self.bucket_name}" + ) def __principal_is_asterisk(self, principals): - return (True if isinstance(principals, str) and principals == '*' else False) + return True if isinstance(principals, str) and principals == "*" else False def get_account_principals_from_bucket_policy_statement(self, statement_principals): aws_account_principals = [] for principal_type, principal in statement_principals.items(): - if principal_type != 'AWS': - continue # not an AWS account - aws_account_principals = principal if isinstance(principal, list) else [ principal ] + if principal_type != "AWS": + continue # not an AWS account + aws_account_principals = ( + principal if isinstance(principal, list) else [principal] + ) return aws_account_principals def create_explicit_deny_in_bucket_policy(self): - new_bucket_policy = json.loads(self.bucket_policy) + new_bucket_policy = json.loads(self.bucket_policy) # type: ignore[arg-type] deny_statement = DenyStatement(self) - for statement in new_bucket_policy['Statement']: - principals = statement.get('Principal', None) + for statement in new_bucket_policy["Statement"]: + principals = statement.get("Principal", None) if principals and not self.__principal_is_asterisk(principals): - account_principals = self.get_account_principals_from_bucket_policy_statement(copy.deepcopy(principals)) - deny_statement.add_next_principal_to_deny(account_principals, self.account_id) + account_principals = ( + self.get_account_principals_from_bucket_policy_statement( + copy.deepcopy(principals) + ) + ) + 
deny_statement.add_next_principal_to_deny( + account_principals, self.account_id + ) - if deny_statement.deny_statement_json: - new_bucket_policy['Statement'].append(deny_statement.deny_statement_json) + if ( + deny_statement.deny_statement_json + and len(deny_statement.deny_statement_json["Principal"]["AWS"]) > 0 + ): + new_bucket_policy["Statement"].append(deny_statement.deny_statement_json) self.bucket_policy = json.dumps(new_bucket_policy) return True + class DenyStatement: def __init__(self, bucket_object): self.bucket_object = bucket_object self.initialize_deny_statement() def initialize_deny_statement(self): - self.deny_statement_json = {} + self.deny_statement_json: Dict[str, Any] = {} self.deny_statement_json["Effect"] = "Deny" - self.deny_statement_json["Principal"] = { - "AWS": [] - } + self.deny_statement_json["Principal"] = {"AWS": []} self.deny_statement_json["Action"] = self.bucket_object.denylist self.deny_statement_json["Resource"] = [ - f'arn:{self.bucket_object.partition}:s3:::{self.bucket_object.bucket_name}', - f'arn:{self.bucket_object.partition}:s3:::{self.bucket_object.bucket_name}/*', + f"arn:{self.bucket_object.partition}:s3:::{self.bucket_object.bucket_name}", + f"arn:{self.bucket_object.partition}:s3:::{self.bucket_object.bucket_name}/*", ] def __str__(self): @@ -7231,7 +9106,7 @@ class DenyStatement: if len(principals_to_deny) == 0: return this_principal = principals_to_deny.pop() - principal_account = this_principal.split(':')[4] + principal_account = this_principal.split(":")[4] if principal_account and principal_account != bucket_account: self.add_deny_principal(this_principal) @@ -7241,13 +9116,10 @@ class DenyStatement: if principal_arn not in self.deny_statement_json["Principal"]["AWS"]: self.deny_statement_json["Principal"]["AWS"].append(principal_arn) - def add_deny_resource(self, resource_arn): - if self.deny_statement_json["Resource"] and resource_arn not in self.deny_statement_json.Resource: - 
self.deny_statement_json["Resource"].append(resource_arn) def update_bucket_policy(event, _): def __get_bucket_from_event(event): - bucket = event.get('bucket') or exit('Bucket not specified') + bucket = event.get("bucket") or exit("Bucket not specified") return bucket bucket_to_update = BucketToRemediate(__get_bucket_from_event(event)) @@ -7257,7 +9129,9 @@ def update_bucket_policy(event, _): if bucket_to_update.create_explicit_deny_in_bucket_policy(): bucket_to_update.update_bucket_policy() else: - exit(f'Unable to create an explicit deny statement for {bucket_to_update.bucket_name}')", + exit( + f"Unable to create an explicit deny statement for {bucket_to_update.bucket_name}" + )", }, "name": "PutS3BucketPolicyDeny", "outputs": [ @@ -7299,6 +9173,138 @@ def update_bucket_policy(event, _): }, "Type": "AWS::SSM::Document", }, + "ASRSetCloudFrontOriginDomain": { + "DependsOn": [ + "CreateWait9", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document name - ASR-SetCloudFrontOriginDomain + +## What does this document do? + This document updates the origin domain on a given CloudFront distribution to prevent a malicious third party from creating the referenced bucket and serving their own content through your distribution. + +## Input Parameters +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +* DistributionId: (Required) ID of the CloudFront Distribution to be updated. 
+ +## Security Standards / Controls +* NIST80053 v5.0.0: CloudFront.12 +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "lambda_handler", + "InputPayload": { + "Id": "{{ DistributionId }}", + }, + "Runtime": "python3.8", + "Script": "import boto3 + + +def lambda_handler(event, _): + # Initialize the CloudFront client + cloudfront_client = boto3.client("cloudfront") + + # The ID of the CloudFront distribution you want to update + distribution_id = event["Id"] + + # Intentionally invalid special-use TLD + new_origin_domain = "cloudfront12remediation.example" + + # Get the current distribution configuration + distribution_config = cloudfront_client.get_distribution_config(Id=distribution_id) + + # Update the origin domain in the distribution configuration + distribution_config["DistributionConfig"]["Origins"]["Items"][0][ + "DomainName" + ] = new_origin_domain + + # Check if distribution is enabled and disable it + if distribution_config["DistributionConfig"]["Enabled"]: + distribution_config["DistributionConfig"]["Enabled"] = False + + # If using an S3 origin type, need to update to custom origin type + if ( + "S3OriginConfig" + in distribution_config["DistributionConfig"]["Origins"]["Items"][0] + ): + # Remove S3OriginConfig key + del distribution_config["DistributionConfig"]["Origins"]["Items"][0][ + "S3OriginConfig" + ] + + # Add CustomOriginConfig key + distribution_config["DistributionConfig"]["Origins"]["Items"][0][ + "CustomOriginConfig" + ] = { + "HTTPPort": 80, + "HTTPSPort": 443, + "OriginProtocolPolicy": "http-only", + "OriginSslProtocols": {"Quantity": 1, "Items": ["TLSv1.2"]}, + "OriginReadTimeout": 30, + "OriginKeepaliveTimeout": 5, + } + + # Update the distribution configuration + cloudfront_client.update_distribution( + DistributionConfig=distribution_config["DistributionConfig"], + Id=distribution_id, + IfMatch=distribution_config["ETag"], + ) + + updated_distribution = 
cloudfront_client.get_distribution_config(Id=distribution_id) + updated_origin_domain = updated_distribution["DistributionConfig"]["Origins"][ + "Items" + ][0]["DomainName"] + + if updated_origin_domain == "cloudfront12remediation.example": + return { + "message": "Origin domain updated successfully.", + "status": "Success", + } + else: + raise RuntimeError( + "Failed to update the origin domain. Updated origin domain did not match 'cloudfront12remediation.example'" + )", + }, + "name": "SetCloudFrontOriginDomain", + "outputs": [ + { + "Name": "Output", + "Selector": "$.Payload", + "Type": "StringMap", + }, + ], + "timeoutSeconds": 600, + }, + ], + "outputs": [ + "SetCloudFrontOriginDomain.Output", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "DistributionId": { + "allowedPattern": "^[A-Za-z0-9]*$", + "description": "(Required) The Distribution ID of the CloudFront distribution.", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-SetCloudFrontOriginDomain", + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, "ASRSetIAMPasswordPolicy": { "DependsOn": [ "CreateWait7", @@ -7390,15 +9396,212 @@ def update_and_verify_iam_user_password_policy(event, context): } raise Exception("VERIFICATION FAILED. AWS ACCOUNT PASSWORD POLICY NOT UPDATED.") - except iam_client.exceptions.NoSuchEntityException: - raise Exception("VERIFICATION FAILED. UNABLE TO UPDATE AWS ACCOUNT PASSWORD POLICY.")", + except iam_client.exceptions.NoSuchEntityException: + raise Exception("VERIFICATION FAILED. 
UNABLE TO UPDATE AWS ACCOUNT PASSWORD POLICY.")", + }, + "isEnd": true, + "name": "UpdateAndVerifyIamUserPasswordPolicy", + "outputs": [ + { + "Name": "Output", + "Selector": "$.Payload.output", + "Type": "StringMap", + }, + ], + "timeoutSeconds": 600, + }, + ], + "outputs": [ + "UpdateAndVerifyIamUserPasswordPolicy.Output", + ], + "parameters": { + "AllowUsersToChangePassword": { + "default": false, + "description": "(Optional) Allows all IAM users in your AWS account to use the AWS Management Console to change their own passwords.", + "type": "Boolean", + }, + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "HardExpiry": { + "default": false, + "description": "(Optional) Prevents IAM users from setting a new password after their password has expired.", + "type": "Boolean", + }, + "MaxPasswordAge": { + "allowedPattern": "^\\d{0,3}$|^10[0-8]\\d$|^109[0-5]$", + "default": 0, + "description": "(Optional) The number of days that an IAM user password is valid.", + "type": "Integer", + }, + "MinimumPasswordLength": { + "allowedPattern": "^[6-9]$|^[1-9]\\d$|^1[01]\\d$|^12[0-8]$", + "default": 6, + "description": "(Optional) The minimum number of characters allowed in an IAM user password.", + "type": "Integer", + }, + "PasswordReusePrevention": { + "allowedPattern": "^\\d{0,1}$|^1\\d$|^2[0-4]$", + "default": 0, + "description": "(Optional) Specifies the number of previous passwords that IAM users are prevented from reusing.", + "type": "Integer", + }, + "RequireLowercaseCharacters": { + "default": false, + "description": "(Optional) Specifies whether IAM user passwords must contain at least one lowercase character from the ISO basic Latin alphabet (a to z).", + "type": "Boolean", + }, + "RequireNumbers": { + "default": false, + "description": "(Optional) Specifies 
whether IAM user passwords must contain at least one numeric character (0 to 9).", + "type": "Boolean", + }, + "RequireSymbols": { + "default": false, + "description": "(Optional) Specifies whether IAM user passwords must contain at least one of the following non-alphanumeric characters :! @ \\# $ % ^ * ( ) _ + - = [ ] { } | '.", + "type": "Boolean", + }, + "RequireUppercaseCharacters": { + "default": false, + "description": "(Optional) Specifies whether IAM user passwords must contain at least one uppercase character from the ISO basic Latin alphabet (A to Z).", + "type": "Boolean", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-SetIAMPasswordPolicy", + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, + "ASRSetS3LifecyclePolicy": { + "DependsOn": [ + "CreateWait10", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document name - ASR-SetS3LifecyclePolicy + +## What does this document do? + This document sets an example lifecycle policy that transfers objects greater than 10 GB to S3 Intelligent Tiering after 90 days. + It is recommended to set lifecycle policies appropriate for the objects stored in your S3 bucket. + [PutBucketLifecycleConfiguration](https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutBucketLifecycleConfiguration.html) API. + + +## Input Parameters +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +* BucketName: (Required) The name of the S3 bucket. 
+ +## Security Standards / Controls +* AFSBP v1.0.0: S3.13 +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "lambda_handler", + "InputPayload": { + "BucketName": "{{ BucketName }}", + "TargetExpirationDays": "{{ TargetExpirationDays }}", + "TargetTransitionDays": "{{ TargetTransitionDays }}", + "TargetTransitionStorageClass": "{{ TargetTransitionStorageClass }}", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +import boto3 +from botocore.config import Config + +BOTO_CONFIG = Config(retries={"mode": "standard", "max_attempts": 10}) + + +def connect_to_s3(): + return boto3.client("s3", config=BOTO_CONFIG) + + +def lambda_handler(event, _): + bucket_name = event["BucketName"] + target_transition_days = event["TargetTransitionDays"] + target_expiration_days = event["TargetExpirationDays"] + target_transition_storage_class = event["TargetTransitionStorageClass"] + rule_id = "S3.13 Remediation Example" + s3 = connect_to_s3() + + lifecycle_policy = {} + if target_expiration_days != 0: + lifecycle_policy = { + "Rules": [ + { + "ID": rule_id, + "Status": "Enabled", + "Expiration": { + "Days": target_expiration_days, + }, + "Transitions": [ + { + "Days": target_transition_days, + "StorageClass": target_transition_storage_class, + }, + ], + "Filter": { + "ObjectSizeGreaterThan": 131072, + }, + }, + ], + } + else: + lifecycle_policy = { + "Rules": [ + { + "ID": rule_id, + "Status": "Enabled", + "Transitions": [ + { + "Days": target_transition_days, + "StorageClass": target_transition_storage_class, + }, + ], + "Filter": { + "ObjectSizeGreaterThan": 131072, + }, + }, + ], + } + + # Set example lifecycle policy + # Moves objects larger than 128 KB to Intelligent Tiering storage class after 30 days + s3.put_bucket_lifecycle_configuration( + Bucket=bucket_name, LifecycleConfiguration=lifecycle_policy + ) + + # Get new lifecycle 
configuration + lifecycle_config = s3.get_bucket_lifecycle_configuration( + Bucket=bucket_name, + ) + + if lifecycle_config["Rules"][0]["ID"] == rule_id: + return { + "message": "Successfully set example S3 lifecycle policy. Review and update as needed.", + "status": "Success", + } + + else: + raise RuntimeError( + "Failed to set S3 lifecycle policy. Lifecycle rule ID did not match 'S3.13 Remediation Example'" + )", }, - "isEnd": true, - "name": "UpdateAndVerifyIamUserPasswordPolicy", + "maxAttempts": 3, + "name": "SetS3LifecyclePolicy", "outputs": [ { "Name": "Output", - "Selector": "$.Payload.output", + "Selector": "$.Payload", "Type": "StringMap", }, ], @@ -7406,68 +9609,41 @@ def update_and_verify_iam_user_password_policy(event, context): }, ], "outputs": [ - "UpdateAndVerifyIamUserPasswordPolicy.Output", + "SetS3LifecyclePolicy.Output", ], "parameters": { - "AllowUsersToChangePassword": { - "default": false, - "description": "(Optional) Allows all IAM users in your AWS account to use the AWS Management Console to change their own passwords.", - "type": "Boolean", - }, "AutomationAssumeRole": { "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role/[\\w+=,.@-]+$", "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", "type": "String", }, - "HardExpiry": { - "default": false, - "description": "(Optional) Prevents IAM users from setting a new password after their password has expired.", - "type": "Boolean", + "BucketName": { + "allowedPattern": "(?=^.{3,63}$)(?!^(\\d+\\.)+\\d+$)(^(([a-z0-9]|[a-z0-9][a-z0-9\\-]*[a-z0-9])\\.)*([a-z0-9]|[a-z0-9][a-z0-9\\-]*[a-z0-9])$)", + "description": "(Required) The name of the S3 bucket.", + "type": "String", }, - "MaxPasswordAge": { - "allowedPattern": "^\\d{0,3}$|^10[0-8]\\d$|^109[0-5]$", + "TargetExpirationDays": { "default": 0, - "description": "(Optional) The number of days that an IAM user password is valid.", + "description": "(Optional) The number of 
days until expiration used for the lifecycle policy.", "type": "Integer", }, - "MinimumPasswordLength": { - "allowedPattern": "^[6-9]$|^[1-9]\\d$|^1[01]\\d$|^12[0-8]$", - "default": 6, - "description": "(Optional) The minimum number of characters allowed in an IAM user password.", - "type": "Integer", - }, - "PasswordReusePrevention": { - "allowedPattern": "^\\d{0,1}$|^1\\d$|^2[0-4]$", - "default": 0, - "description": "(Optional) Specifies the number of previous passwords that IAM users are prevented from reusing.", + "TargetTransitionDays": { + "default": 30, + "description": "(Optional) The number of days until transition used for the lifecycle policy.", "type": "Integer", }, - "RequireLowercaseCharacters": { - "default": false, - "description": "(Optional) Specifies whether IAM user passwords must contain at least one lowercase character from the ISO basic Latin alphabet (a to z).", - "type": "Boolean", - }, - "RequireNumbers": { - "default": false, - "description": "(Optional) Specifies whether IAM user passwords must contain at least one numeric character (0 to 9).", - "type": "Boolean", - }, - "RequireSymbols": { - "default": false, - "description": "(Optional) Specifies whether IAM user passwords must contain at least one of the following non-alphanumeric characters :! 
@ \\# $ % ^ * ( ) _ + - = [ ] { } | '.", - "type": "Boolean", - }, - "RequireUppercaseCharacters": { - "default": false, - "description": "(Optional) Specifies whether IAM user passwords must contain at least one uppercase character from the ISO basic Latin alphabet (A to Z).", - "type": "Boolean", + "TargetTransitionStorageClass": { + "allowedPattern": ".*", + "default": "INTELLIGENT_TIERING", + "description": "(Optional) The name of the storage class that will be used for the lifecycle policy.", + "type": "String", }, }, "schemaVersion": "0.3", }, "DocumentFormat": "YAML", "DocumentType": "Automation", - "Name": "ASR-SetIAMPasswordPolicy", + "Name": "ASR-SetS3LifecyclePolicy", "UpdateMethod": "NewVersion", }, "Type": "AWS::SSM::Document", @@ -7494,9 +9670,9 @@ This document adds a bucket policy to require transmission over HTTPS for the gi * Remediation.Output - stdout messages from the remediation ## Security Standards / Controls -* AFSBP v1.0.0: S3.5 -* CIS v1.2.0: n/a -* PCI: S3.5 +* AWS FSBP v1.0.0: S3.5 +* CIS v1.2.0: n/a +* PCI: S3.5 ", "mainSteps": [ { @@ -7512,19 +9688,17 @@ This document adds a bucket policy to require transmission over HTTPS for the gi "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 import json + import boto3 from botocore.config import Config from botocore.exceptions import ClientError -boto_config = Config( - retries = { - 'mode': 'standard', - 'max_attempts': 10 - } - ) +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + def connect_to_s3(): - return boto3.client('s3', config=boto_config) + return boto3.client("s3", config=boto_config) + def policy_to_add(bucket, partition): return { @@ -7532,63 +9706,57 @@ def policy_to_add(bucket, partition): "Action": "s3:*", "Effect": "Deny", "Resource": [ - f'arn:{partition}:s3:::{bucket}', - f'arn:{partition}:s3:::{bucket}/*' + f"arn:{partition}:s3:::{bucket}", + f"arn:{partition}:s3:::{bucket}/*", ], - "Condition": { - "Bool": { - "aws:SecureTransport": "false" - } - }, - "Principal": "*" + "Condition": {"Bool": {"aws:SecureTransport": "false"}}, + "Principal": "*", } + + def new_policy(): - return { - "Id": "BucketPolicy", - "Version": "2012-10-17", - "Statement": [] - } + return {"Id": "BucketPolicy", "Version": "2012-10-17", "Statement": []} + def add_ssl_bucket_policy(event, _): - bucket_name = event['bucket'] - account_id = event['accountid'] - aws_partition = event['partition'] + bucket_name = event["bucket"] + account_id = event["accountid"] + aws_partition = event["partition"] s3 = connect_to_s3() bucket_policy = {} try: existing_policy = s3.get_bucket_policy( - Bucket=bucket_name, - ExpectedBucketOwner=account_id + Bucket=bucket_name, ExpectedBucketOwner=account_id ) - bucket_policy = json.loads(existing_policy['Policy']) + bucket_policy = json.loads(existing_policy["Policy"]) except ClientError as ex: - exception_type = ex.response['Error']['Code'] + exception_type = ex.response["Error"]["Code"] # delivery channel already exists - return if exception_type not in ["NoSuchBucketPolicy"]: - exit(f'ERROR: Boto3 s3 ClientError: {exception_type} - {str(ex)}') + exit(f"ERROR: Boto3 s3 ClientError: {exception_type} - {str(ex)}") except 
Exception as e: - exit(f'ERROR getting bucket policy for {bucket_name}: {str(e)}') + exit(f"ERROR getting bucket policy for {bucket_name}: {str(e)}") if not bucket_policy: bucket_policy = new_policy() - print(f'Existing policy: {bucket_policy}') - bucket_policy['Statement'].append(policy_to_add(bucket_name, aws_partition)) + print(f"Existing policy: {bucket_policy}") + bucket_policy["Statement"].append(policy_to_add(bucket_name, aws_partition)) try: result = s3.put_bucket_policy( Bucket=bucket_name, Policy=json.dumps(bucket_policy, indent=4, default=str), - ExpectedBucketOwner=account_id + ExpectedBucketOwner=account_id, ) print(result) except ClientError as ex: - exception_type = ex.response['Error']['Code'] - exit(f'ERROR: Boto3 s3 ClientError: {exception_type} - {str(ex)}') + exception_type = ex.response["Error"]["Code"] + exit(f"ERROR: Boto3 s3 ClientError: {exception_type} - {str(ex)}") except Exception as e: - exit(f'ERROR putting bucket policy for {bucket_name}: {str(e)}') + exit(f"ERROR putting bucket policy for {bucket_name}: {str(e)}") - print(f'New policy: {bucket_policy}')", + print(f"New policy: {bucket_policy}")", }, "name": "Remediation", "outputs": [ @@ -7629,12 +9797,152 @@ def add_ssl_bucket_policy(event, _): }, "Type": "AWS::SSM::Document", }, + "ASRUpdateSecretRotationPeriod": { + "DependsOn": [ + "CreateWait10", + ], + "Properties": { + "Content": { + "assumeRole": "{{ AutomationAssumeRole }}", + "description": "### Document name - ASR-UpdateSecretRotationPeriod + +## What does this document do? + This document rotates a secret and sets its rotation period to 90 days. + [RotateSecret](https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_RotateSecret.html) API. + + +## Input Parameters +* AutomationAssumeRole: (Required) The ARN of the role that allows Automation to perform the actions on your behalf. +* SecretARN: (Required) The ARN of the Secrets Manager secret. 
+ +## Security Standards / Controls +* AFSBP v1.0.0: SecretsManager.4 +", + "mainSteps": [ + { + "action": "aws:executeScript", + "inputs": { + "Handler": "lambda_handler", + "InputPayload": { + "MaxDaysSinceRotation": "{{ MaxDaysSinceRotation }}", + "SecretARN": "{{ SecretARN }}", + }, + "Runtime": "python3.8", + "Script": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +import boto3 +from botocore.config import Config + +boto_config = Config(retries={"mode": "standard", "max_attempts": 10}) + + +def connect_to_secretsmanager(): + return boto3.client("secretsmanager", config=boto_config) + + +def lambda_handler(event, _): + secret_arn = event["SecretARN"] + max_days_since_rotation = event["MaxDaysSinceRotation"] + + secretsmanager = connect_to_secretsmanager() + + try: + # Rotate secret and set rotation schedule + secretsmanager.rotate_secret( + SecretId=secret_arn, + RotationRules={ + "AutomaticallyAfterDays": max_days_since_rotation, + }, + RotateImmediately=True, + ) + + # Verify secret rotation schedule updated. + response = secretsmanager.describe_secret(SecretId=secret_arn) + + if "RotationRules" in response: + if ( + response["RotationRules"]["AutomaticallyAfterDays"] + <= max_days_since_rotation + ): + return { + "message": f"Rotated secret and set rotation schedule to {max_days_since_rotation} days.", + "status": "Success", + } + else: + return { + "message": "Failed to rotate secret and set rotation schedule.", + "status": "Failed", + } + + # If secret was already rotated, an exception will be thrown. + except Exception as e: + # Verify secret rotation schedule updated. + response = secretsmanager.describe_secret(SecretId=secret_arn) + + if "RotationRules" in response: + if ( + response["RotationRules"]["AutomaticallyAfterDays"] + <= max_days_since_rotation + ): + return { + "message": f"Set rotation schedule to {max_days_since_rotation} days. 
Secret is already being rotated.", + "status": "Success", + } + else: + return { + "message": f"Failed to rotate secret and set rotation schedule: {str(e)}", + "status": "Failed", + }", + }, + "maxAttempts": 3, + "name": "UpdateSecretRotationPeriod", + "outputs": [ + { + "Name": "Output", + "Selector": "$.Payload", + "Type": "StringMap", + }, + ], + "timeoutSeconds": 600, + }, + ], + "outputs": [ + "UpdateSecretRotationPeriod.Output", + ], + "parameters": { + "AutomationAssumeRole": { + "allowedPattern": "^arn:(?:aws|aws-us-gov|aws-cn):iam::\\d{12}:role/[\\w+=,.@-]+$", + "description": "(Required) The ARN of the role that allows Automation to perform the actions on your behalf.", + "type": "String", + }, + "MaxDaysSinceRotation": { + "allowedPattern": "^\\d{0,3}$", + "default": 90, + "description": "(Optional) The number of days set for the secret's rotation period.", + "type": "Integer", + }, + "SecretARN": { + "allowedPattern": "^arn:(?:aws|aws-cn|aws-us-gov):secretsmanager:(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d):\\d{12}:secret:([A-Za-z0-9\\/_+=.@-]+)$", + "description": "(Required) The ARN of the Secrets Manager secret.", + "type": "String", + }, + }, + "schemaVersion": "0.3", + }, + "DocumentFormat": "YAML", + "DocumentType": "Automation", + "Name": "ASR-UpdateSecretRotationPeriod", + "UpdateMethod": "NewVersion", + }, + "Type": "AWS::SSM::Document", + }, "CreateWait0": { "DeletionPolicy": "Delete", "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "5081ead48ffcae3f2226fd136f97331dd4ff628325a06417f2abac7c3005148f", + "DocumentPropertiesHash": "81507fa8cc08b25712d6bb11838b74debfaef70c57538bc5b4a41604c53f805a", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -7651,7 +9959,41 @@ def add_ssl_bucket_policy(event, _): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "ed6f0fed953f12a30fce8949d3584bea716efea4cf8d55d2b8d6bff413b7c3ee", + "DocumentPropertiesHash": 
"136f833c9afc46d6dbb74732440b922bbeab34f14b0e2ae4e1a71e0965e09acd", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 1, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "CreateWait10": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "CreateWait9", + ], + "Properties": { + "CreateIntervalSeconds": 1, + "DeleteIntervalSeconds": 0, + "DocumentPropertiesHash": "59245ae3ddbfd7a056178f016a36493661aafeac24e726703cc08a7731400729", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 1, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "CreateWait11": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "CreateWait10", + ], + "Properties": { + "CreateIntervalSeconds": 1, + "DeleteIntervalSeconds": 0, + "DocumentPropertiesHash": "c855d30b2277c8fb3f6e525a303816e91a32109f04e278c91b7e97958fed1127", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -7668,7 +10010,7 @@ def add_ssl_bucket_policy(event, _): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "386b3690120f0f70335f86c2272f23c7df6388bde888c7dfd94dee3ad9b586f3", + "DocumentPropertiesHash": "2d0fcae37e200f1766715e193822d8f770336256aa94a3a3d343ebd316f63b53", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -7685,7 +10027,7 @@ def add_ssl_bucket_policy(event, _): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "d8cb9fbe52d6cb8d12cf3f418251ea9ecf2a823155b7aeceaefc304e2999d008", + "DocumentPropertiesHash": "50ff92fe188d981a49c50a99a2a170b0bc8c070b4c6f3cc0f70ac1f3dea64c43", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -7702,7 +10044,7 @@ def add_ssl_bucket_policy(event, _): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "cbb21589711741582f016da0204b949e6bb393744d53b01dcc5383669d106dea", + "DocumentPropertiesHash": 
"a26ba78d0f128ca18d92c8df0267bd94d1fafe39cc465ff8073f15762b423bd0", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -7753,7 +10095,7 @@ def add_ssl_bucket_policy(event, _): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "971be703d38efdd2d75a17325953419f3244f959e839b5a0668b12146968ac0c", + "DocumentPropertiesHash": "a79a323f87cedc32cab2f0319b3b409458bcfb163d2c63c51fe7792a4e80456e", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -7770,7 +10112,24 @@ def add_ssl_bucket_policy(event, _): "Properties": { "CreateIntervalSeconds": 1, "DeleteIntervalSeconds": 0, - "DocumentPropertiesHash": "dff26ffe5bc8a2eb7a84297f591aa46181b17efa8b7c977636db9cf3beeec61c", + "DocumentPropertiesHash": "e0d97e2de165b12c26e6a4d6e15358c0302a5363b2812f72efcd482cc4e2c148", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 1, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "CreateWait9": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "CreateWait8", + ], + "Properties": { + "CreateIntervalSeconds": 1, + "DeleteIntervalSeconds": 0, + "DocumentPropertiesHash": "56cd4b027684fdab25f1dce18babb64db6bc08add36605f626a0c54f74a97ab4", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -7791,7 +10150,7 @@ def add_ssl_bucket_policy(event, _): "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "5081ead48ffcae3f2226fd136f97331dd4ff628325a06417f2abac7c3005148f", + "DocumentPropertiesHash": "81507fa8cc08b25712d6bb11838b74debfaef70c57538bc5b4a41604c53f805a", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -7813,7 +10172,48 @@ def add_ssl_bucket_policy(event, _): "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "ed6f0fed953f12a30fce8949d3584bea716efea4cf8d55d2b8d6bff413b7c3ee", + "DocumentPropertiesHash": 
"136f833c9afc46d6dbb74732440b922bbeab34f14b0e2ae4e1a71e0965e09acd", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 0, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "DeletWait10": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "ASRDisableTGWAutoAcceptSharedAttachments", + "ASREnableAutoSecretRotation", + "ASREnableGuardDuty", + "ASRSetS3LifecyclePolicy", + "ASRUpdateSecretRotationPeriod", + "DeletWait9", + ], + "Properties": { + "CreateIntervalSeconds": 0, + "DeleteIntervalSeconds": 0.5, + "DocumentPropertiesHash": "59245ae3ddbfd7a056178f016a36493661aafeac24e726703cc08a7731400729", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 0, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "DeletWait11": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "ASRRemoveUnusedSecret", + "ASRRevokeUnauthorizedInboundRules", + "DeletWait10", + ], + "Properties": { + "CreateIntervalSeconds": 0, + "DeleteIntervalSeconds": 0.5, + "DocumentPropertiesHash": "c855d30b2277c8fb3f6e525a303816e91a32109f04e278c91b7e97958fed1127", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -7835,7 +10235,7 @@ def add_ssl_bucket_policy(event, _): "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "386b3690120f0f70335f86c2272f23c7df6388bde888c7dfd94dee3ad9b586f3", + "DocumentPropertiesHash": "2d0fcae37e200f1766715e193822d8f770336256aa94a3a3d343ebd316f63b53", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -7857,7 +10257,7 @@ def add_ssl_bucket_policy(event, _): "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "d8cb9fbe52d6cb8d12cf3f418251ea9ecf2a823155b7aeceaefc304e2999d008", + "DocumentPropertiesHash": "50ff92fe188d981a49c50a99a2a170b0bc8c070b4c6f3cc0f70ac1f3dea64c43", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -7879,7 +10279,7 @@ 
def add_ssl_bucket_policy(event, _): "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "cbb21589711741582f016da0204b949e6bb393744d53b01dcc5383669d106dea", + "DocumentPropertiesHash": "a26ba78d0f128ca18d92c8df0267bd94d1fafe39cc465ff8073f15762b423bd0", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -7945,7 +10345,7 @@ def add_ssl_bucket_policy(event, _): "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "971be703d38efdd2d75a17325953419f3244f959e839b5a0668b12146968ac0c", + "DocumentPropertiesHash": "a79a323f87cedc32cab2f0319b3b409458bcfb163d2c63c51fe7792a4e80456e", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, @@ -7958,13 +10358,38 @@ def add_ssl_bucket_policy(event, _): "DeletionPolicy": "Delete", "DependsOn": [ "ASRDisablePublicIPAutoAssign", + "ASREnableCloudFrontDefaultRootObject", "ASREnableDeliveryStatusLoggingForSNSTopic", + "ASREnableIMDSV2OnInstance", + "ASRRemoveCodeBuildPrivilegedMode", "DeletWait7", ], "Properties": { "CreateIntervalSeconds": 0, "DeleteIntervalSeconds": 0.5, - "DocumentPropertiesHash": "dff26ffe5bc8a2eb7a84297f591aa46181b17efa8b7c977636db9cf3beeec61c", + "DocumentPropertiesHash": "e0d97e2de165b12c26e6a4d6e15358c0302a5363b2812f72efcd482cc4e2c148", + "ServiceToken": { + "Ref": "WaitProviderServiceToken", + }, + "UpdateIntervalSeconds": 0, + }, + "Type": "Custom::Wait", + "UpdateReplacePolicy": "Delete", + }, + "DeletWait9": { + "DeletionPolicy": "Delete", + "DependsOn": [ + "ASRBlockSSMDocumentPublicAccess", + "ASRDisableUnrestrictedAccessToHighRiskPorts", + "ASREnableBucketEventNotifications", + "ASREnablePrivateRepositoryScanning", + "ASRSetCloudFrontOriginDomain", + "DeletWait8", + ], + "Properties": { + "CreateIntervalSeconds": 0, + "DeleteIntervalSeconds": 0.5, + "DocumentPropertiesHash": "56cd4b027684fdab25f1dce18babb64db6bc08add36605f626a0c54f74a97ab4", "ServiceToken": { "Ref": "WaitProviderServiceToken", }, 
diff --git a/source/test/__snapshots__/solution_deploy.test.ts.snap b/source/test/__snapshots__/solution_deploy.test.ts.snap index 6f412972..7ee2b6e8 100644 --- a/source/test/__snapshots__/solution_deploy.test.ts.snap +++ b/source/test/__snapshots__/solution_deploy.test.ts.snap @@ -3,6 +3,41 @@ exports[`Test if the Stack has all the resources. 1`] = ` { "Conditions": { + "ShouldDeployAppReg": { + "Fn::Not": [ + { + "Fn::Equals": [ + { + "Ref": "AWS::Partition", + }, + "aws-cn", + ], + }, + ], + }, + "isUsingCloudWatchMetrics": { + "Fn::Equals": [ + { + "Ref": "UseCloudWatchMetrics", + }, + "yes", + ], + }, + "isUsingCloudWatchMetricsAlarms": { + "Fn::And": [ + { + "Condition": "isUsingCloudWatchMetrics", + }, + { + "Fn::Equals": [ + { + "Ref": "UseCloudWatchMetricsAlarms", + }, + "yes", + ], + }, + ], + }, "loadAFSBPCond": { "Fn::Equals": [ { @@ -11,6 +46,16 @@ exports[`Test if the Stack has all the resources. 1`] = ` "yes", ], }, + "loadAFSBPCondAndShouldDeployAppReg": { + "Fn::And": [ + { + "Condition": "ShouldDeployAppReg", + }, + { + "Condition": "loadAFSBPCond", + }, + ], + }, "loadCIS120Cond": { "Fn::Equals": [ { @@ -19,6 +64,16 @@ exports[`Test if the Stack has all the resources. 1`] = ` "yes", ], }, + "loadCIS120CondAndShouldDeployAppReg": { + "Fn::And": [ + { + "Condition": "ShouldDeployAppReg", + }, + { + "Condition": "loadCIS120Cond", + }, + ], + }, "loadCIS140Cond": { "Fn::Equals": [ { @@ -27,6 +82,34 @@ exports[`Test if the Stack has all the resources. 
1`] = ` "yes", ], }, + "loadCIS140CondAndShouldDeployAppReg": { + "Fn::And": [ + { + "Condition": "ShouldDeployAppReg", + }, + { + "Condition": "loadCIS140Cond", + }, + ], + }, + "loadNIST80053Cond": { + "Fn::Equals": [ + { + "Ref": "LoadNIST80053AdminStack", + }, + "yes", + ], + }, + "loadNIST80053CondAndShouldDeployAppReg": { + "Fn::And": [ + { + "Condition": "ShouldDeployAppReg", + }, + { + "Condition": "loadNIST80053Cond", + }, + ], + }, "loadPCI321Cond": { "Fn::Equals": [ { @@ -35,6 +118,16 @@ exports[`Test if the Stack has all the resources. 1`] = ` "yes", ], }, + "loadPCI321CondAndShouldDeployAppReg": { + "Fn::And": [ + { + "Condition": "ShouldDeployAppReg", + }, + { + "Condition": "loadPCI321Cond", + }, + ], + }, "loadSCCond": { "Fn::Equals": [ { @@ -43,6 +136,16 @@ exports[`Test if the Stack has all the resources. 1`] = ` "yes", ], }, + "loadSCCondAndShouldDeployAppReg": { + "Fn::And": [ + { + "Condition": "ShouldDeployAppReg", + }, + { + "Condition": "loadSCCond", + }, + ], + }, }, "Mappings": { "Solution": { @@ -61,7 +164,7 @@ exports[`Test if the Stack has all the resources. 1`] = ` }, }, "mappings": { - "sendAnonymousMetrics": { + "sendAnonymizedMetrics": { "data": "Yes", }, }, @@ -77,11 +180,41 @@ exports[`Test if the Stack has all the resources. 
1`] = ` "LoadAFSBPAdminStack", "LoadCIS120AdminStack", "LoadCIS140AdminStack", + "LoadNIST80053AdminStack", "LoadPCI321AdminStack", "LoadSCAdminStack", ], }, + { + "Label": { + "default": "Orchestrator Configuration", + }, + "Parameters": [ + "ReuseOrchestratorLogGroup", + ], + }, + { + "Label": { + "default": "CloudWatch Metrics", + }, + "Parameters": [ + "UseCloudWatchMetrics", + "UseCloudWatchMetricsAlarms", + "StateMachineExecutionsAlarmThreshold", + ], + }, ], + "ParameterLabels": { + "StateMachineExecutionsAlarmThreshold": { + "default": "StateMachineExecutionsAlarmThreshold", + }, + "UseCloudWatchMetrics": { + "default": "UseCloudWatchMetrics", + }, + "UseCloudWatchMetricsAlarms": { + "default": "UseCloudWatchMetricsAlarms", + }, + }, }, }, "Parameters": { @@ -112,6 +245,15 @@ exports[`Test if the Stack has all the resources. 1`] = ` "Description": "Load CloudWatch Event Rules for CIS140?", "Type": "String", }, + "LoadNIST80053AdminStack": { + "AllowedValues": [ + "yes", + "no", + ], + "Default": "yes", + "Description": "Load CloudWatch Event Rules for NIST80053?", + "Type": "String", + }, "LoadPCI321AdminStack": { "AllowedValues": [ "yes", @@ -139,9 +281,111 @@ exports[`Test if the Stack has all the resources. 1`] = ` "Description": "Reuse existing Orchestrator Log Group? 
Choose "yes" if the log group already exists, else "no"", "Type": "String", }, + "StateMachineExecutionsAlarmThreshold": { + "Default": 1000, + "Description": "Number of executions in one period to trigger the state machine executions alarm", + "Type": "Number", + }, + "UseCloudWatchMetrics": { + "AllowedValues": [ + "yes", + "no", + ], + "Default": "yes", + "Description": "Enable collection of operational metrics and create a CloudWatch dashboard to monitor solution operations", + "Type": "String", + }, + "UseCloudWatchMetricsAlarms": { + "AllowedValues": [ + "yes", + "no", + ], + "Default": "yes", + "Description": "Create CloudWatch Alarms for gathered metrics", + "Type": "String", + }, }, "Resources": { + "ASRAlarmTopic7CEFBDF9": { + "Condition": "isUsingCloudWatchMetricsAlarms", + "Properties": { + "DisplayName": "ASR Alarm Topic (SO0111)", + "KmsMasterKeyId": { + "Fn::GetAtt": [ + "SHARRkeyE6BD0F56", + "Arn", + ], + }, + "TopicName": "SO0111-ASR_Alarm_Topic", + }, + "Type": "AWS::SNS::Topic", + }, + "ASRDeploymentCustomResourceLambda6AFCEDA5": { + "DependsOn": [ + "stackRole9B4D53CC", + ], + "Properties": { + "Code": { + "S3Bucket": "solutions-eu-west-1", + "S3Key": "aws-security-hub-automated-response-and-remediation/v1.0.0/lambda/deployment_metrics_custom_resource.zip", + }, + "Description": "ASR - Handles deployment related custom actions", + "Environment": { + "Variables": { + "AWS_PARTITION": { + "Ref": "AWS::Partition", + }, + "LOG_LEVEL": "INFO", + "SOLUTION_ID": "SO0111", + "SOLUTION_VERSION": "v1.0.0", + }, + }, + "Handler": "deployment_metrics_custom_resource.lambda_handler", + "Layers": [ + { + "Ref": "SharrLambdaLayer5BF8F147", + }, + ], + "MemorySize": 256, + "Role": { + "Fn::GetAtt": [ + "stackRole9B4D53CC", + "Arn", + ], + }, + "Runtime": "python3.9", + "Timeout": 5, + }, + "Type": "AWS::Lambda::Function", + }, + "ASRDeploymentMetricsCustomResource": { + "DeletionPolicy": "Delete", + "Properties": { + "CloudWatchMetricsDashboardEnabled": { + 
"Ref": "UseCloudWatchMetrics", + }, + "ServiceToken": { + "Fn::GetAtt": [ + "ASRDeploymentCustomResourceLambda6AFCEDA5", + "Arn", + ], + }, + }, + "Type": "Custom::DeploymentMetrics", + "UpdateReplacePolicy": "Delete", + }, + "ASRSendCloudWatchMetricsD6C71A5B": { + "Condition": "isUsingCloudWatchMetrics", + "Properties": { + "Description": "Flag to enable or disable sending cloudwatch metrics.", + "Name": "/Solutions/SO0111/sendCloudwatchMetrics", + "Type": "String", + "Value": "yes", + }, + "Type": "AWS::SSM::Parameter", + }, "AppRegistry968496A3": { + "Condition": "ShouldDeployAppReg", "Properties": { "Description": "Service Catalog application to track and manage all your resources for the solution automated-security-response-on-aws", "Name": { @@ -201,6 +445,7 @@ exports[`Test if the Stack has all the resources. 1`] = ` "Type": "AWS::ServiceCatalogAppRegistry::Application", }, "AppRegistryAssociation": { + "Condition": "ShouldDeployAppReg", "Properties": { "Application": { "Fn::GetAtt": [ @@ -216,6 +461,7 @@ exports[`Test if the Stack has all the resources. 1`] = ` "Type": "AWS::ServiceCatalogAppRegistry::ResourceAssociation", }, "AppRegistryAttributeGroupAssociation58e755b9eb72544DB135": { + "Condition": "ShouldDeployAppReg", "Properties": { "Application": { "Fn::GetAtt": [ @@ -233,6 +479,7 @@ exports[`Test if the Stack has all the resources. 1`] = ` "Type": "AWS::ServiceCatalogAppRegistry::AttributeGroupAssociation", }, "AppRegistryResourceAssociation142839FB0": { + "Condition": "ShouldDeployAppReg", "DependsOn": [ "orchestratorNestedLogStackNestedStackNestedLogStackNestedStackResourceE4E042A6", ], @@ -251,7 +498,7 @@ exports[`Test if the Stack has all the resources. 
1`] = ` "Type": "AWS::ServiceCatalogAppRegistry::ResourceAssociation", }, "AppRegistryResourceAssociation2BB1A3300": { - "Condition": "loadAFSBPCond", + "Condition": "loadAFSBPCondAndShouldDeployAppReg", "DependsOn": [ "PlaybookAdminStackAFSBP", ], @@ -270,7 +517,7 @@ exports[`Test if the Stack has all the resources. 1`] = ` "Type": "AWS::ServiceCatalogAppRegistry::ResourceAssociation", }, "AppRegistryResourceAssociation3BEAC7BB7": { - "Condition": "loadCIS120Cond", + "Condition": "loadCIS120CondAndShouldDeployAppReg", "DependsOn": [ "PlaybookAdminStackCIS120", ], @@ -289,7 +536,7 @@ exports[`Test if the Stack has all the resources. 1`] = ` "Type": "AWS::ServiceCatalogAppRegistry::ResourceAssociation", }, "AppRegistryResourceAssociation46F7B9873": { - "Condition": "loadCIS140Cond", + "Condition": "loadCIS140CondAndShouldDeployAppReg", "DependsOn": [ "PlaybookAdminStackCIS140", ], @@ -308,7 +555,26 @@ exports[`Test if the Stack has all the resources. 1`] = ` "Type": "AWS::ServiceCatalogAppRegistry::ResourceAssociation", }, "AppRegistryResourceAssociation5FAA30631": { - "Condition": "loadPCI321Cond", + "Condition": "loadNIST80053CondAndShouldDeployAppReg", + "DependsOn": [ + "PlaybookAdminStackNIST80053", + ], + "Properties": { + "Application": { + "Fn::GetAtt": [ + "AppRegistry968496A3", + "Id", + ], + }, + "Resource": { + "Ref": "PlaybookAdminStackNIST80053", + }, + "ResourceType": "CFN_STACK", + }, + "Type": "AWS::ServiceCatalogAppRegistry::ResourceAssociation", + }, + "AppRegistryResourceAssociation62B582FF5": { + "Condition": "loadPCI321CondAndShouldDeployAppReg", "DependsOn": [ "PlaybookAdminStackPCI321", ], @@ -326,8 +592,8 @@ exports[`Test if the Stack has all the resources. 
1`] = ` }, "Type": "AWS::ServiceCatalogAppRegistry::ResourceAssociation", }, - "AppRegistryResourceAssociation62B582FF5": { - "Condition": "loadSCCond", + "AppRegistryResourceAssociation7A2A1D7B5": { + "Condition": "loadSCCondAndShouldDeployAppReg", "DependsOn": [ "PlaybookAdminStackSC", ], @@ -350,14 +616,6 @@ exports[`Test if the Stack has all the resources. 1`] = ` "createCustomActionRoleF0047414", ], "Metadata": { - "cdk_nag": { - "rules_to_suppress": [ - { - "id": "AwsSolutions-L1", - "reason": "Will upgrade in next release to prioritize patch", - }, - ], - }, "cfn_nag": { "rules_to_suppress": [ { @@ -389,10 +647,10 @@ exports[`Test if the Stack has all the resources. 1`] = ` "SOLUTION_ID": "SO0111", "SOLUTION_VERSION": "v1.0.0", "log_level": "info", - "sendAnonymousMetrics": { + "sendAnonymizedMetrics": { "Fn::FindInMap": [ "mappings", - "sendAnonymousMetrics", + "sendAnonymizedMetrics", "data", ], }, @@ -418,6 +676,7 @@ exports[`Test if the Stack has all the resources. 1`] = ` "Type": "AWS::Lambda::Function", }, "DefaultApplicationAttributesFC1CC26B": { + "Condition": "ShouldDeployAppReg", "Properties": { "Attributes": { "applicationType": { @@ -464,6 +723,80 @@ exports[`Test if the Stack has all the resources. 1`] = ` }, "Type": "AWS::ServiceCatalogAppRegistry::AttributeGroup", }, + "FailedAssumeRoleAlarm06397028": { + "Condition": "isUsingCloudWatchMetricsAlarms", + "Properties": { + "ActionsEnabled": true, + "AlarmActions": [ + { + "Ref": "ASRAlarmTopic7CEFBDF9", + }, + ], + "AlarmDescription": "ASR Runbook Failed to assume role in an account. 
This indicates that a remediation was attempted in an account that does not have ASR deployed.", + "AlarmName": "ASR-RunbookAssumeRoleFailure", + "ComparisonOperator": "GreaterThanOrEqualToThreshold", + "DatapointsToAlarm": 1, + "EvaluationPeriods": 1, + "Metrics": [ + { + "Id": "m1", + "Label": "Runbook Assume Role Failures", + "MetricStat": { + "Metric": { + "MetricName": "AssumeRoleFailure", + "Namespace": "ASR", + }, + "Period": 86400, + "Stat": "Sum", + }, + "ReturnData": true, + }, + ], + "Threshold": 1, + "TreatMissingData": "notBreaching", + }, + "Type": "AWS::CloudWatch::Alarm", + }, + "NoRemediationErrorAlarm20FFD8DF": { + "Condition": "isUsingCloudWatchMetricsAlarms", + "Properties": { + "ActionsEnabled": true, + "AlarmActions": [ + { + "Ref": "ASRAlarmTopic7CEFBDF9", + }, + ], + "AlarmDescription": "Remediation failed with NOREMEDIATION result. This indicates a remediation was attempted for an unsupported remediation", + "AlarmName": "ASR-NoRemediation", + "ComparisonOperator": "GreaterThanOrEqualToThreshold", + "DatapointsToAlarm": 1, + "EvaluationPeriods": 1, + "Metrics": [ + { + "Id": "m1", + "Label": "NOREMEDIATION", + "MetricStat": { + "Metric": { + "Dimensions": [ + { + "Name": "Outcome", + "Value": "NOREMEDIATION", + }, + ], + "MetricName": "RemediationOutcome", + "Namespace": "ASR", + }, + "Period": 86400, + "Stat": "Sum", + }, + "ReturnData": true, + }, + ], + "Threshold": 1, + "TreatMissingData": "notBreaching", + }, + "Type": "AWS::CloudWatch::Alarm", + }, "PlaybookAdminStackAFSBP": { "Condition": "loadAFSBPCond", "DeletionPolicy": "Delete", @@ -572,6 +905,42 @@ exports[`Test if the Stack has all the resources. 
1`] = ` "Type": "AWS::CloudFormation::Stack", "UpdateReplacePolicy": "Delete", }, + "PlaybookAdminStackNIST80053": { + "Condition": "loadNIST80053Cond", + "DeletionPolicy": "Delete", + "DependsOn": [ + "orchestratorSHARROrchestratorArn0ACC7B05", + "orchestratorStateMachine77C3F8FB", + ], + "Properties": { + "TemplateURL": { + "Fn::Join": [ + "", + [ + "https://", + { + "Fn::FindInMap": [ + "SourceCode", + "General", + "S3Bucket", + ], + }, + "-reference.s3.amazonaws.com/", + { + "Fn::FindInMap": [ + "SourceCode", + "General", + "KeyPrefix", + ], + }, + "/playbooks/NIST80053Stack.template", + ], + ], + }, + }, + "Type": "AWS::CloudFormation::Stack", + "UpdateReplacePolicy": "Delete", + }, "PlaybookAdminStackPCI321": { "Condition": "loadPCI321Cond", "DeletionPolicy": "Delete", @@ -752,6 +1121,64 @@ exports[`Test if the Stack has all the resources. 1`] = ` }, "Type": "AWS::Events::Rule", }, + "RemediationDashboard7EC0D4B1": { + "Condition": "isUsingCloudWatchMetrics", + "Properties": { + "DashboardBody": { + "Fn::Join": [ + "", + [ + "{"start":"-P7D","widgets":[{"type":"metric","width":6,"height":6,"x":0,"y":0,"properties":{"view":"timeSeries","title":"State Machine Executions","region":"", + { + "Ref": "AWS::Region", + }, + "","metrics":[["AWS/States","ExecutionsStarted","StateMachineArn","", + { + "Ref": "orchestratorStateMachine77C3F8FB", + }, + "",{"label":"Remediations started","period":86400,"stat":"Sum"}]],"annotations":{"horizontal":[{"label":"Remediations started >= ", + { + "Ref": "StateMachineExecutionsAlarmThreshold", + }, + " for 1 datapoints within 1440 minutes","value":", + { + "Ref": "StateMachineExecutionsAlarmThreshold", + }, + ","yAxis":"left"}]},"yAxis":{}}},{"type":"metric","width":6,"height":6,"x":6,"y":0,"properties":{"view":"timeSeries","title":"Remediation Outcomes","region":"", + { + "Ref": "AWS::Region", + }, + 
"","metrics":[[{"label":"FAILURE","expression":"SUM([m1+m2+m3+m4])","period":86400}],["ASR","RemediationOutcome","Outcome","LAMBDAERROR",{"label":"LAMBDAERROR","period":86400,"stat":"Sum","visible":false,"id":"m1"}],["ASR","RemediationOutcome","Outcome","REMEDIATIONNOTTACTIVE",{"label":"REMEDIATIONNOTTACTIVE","period":86400,"stat":"Sum","visible":false,"id":"m2"}],["ASR","RemediationOutcome","Outcome","NOREMEDIATION",{"label":"NOREMEDIATION","period":86400,"stat":"Sum","visible":false,"id":"m3"}],["ASR","RemediationOutcome","Outcome","STANDARDNOTENABLED",{"label":"STANDARDNOTENABLED","period":86400,"stat":"Sum","visible":false,"id":"m4"}],["ASR","RemediationOutcome","Outcome","SUCCESS",{"label":"SUCCESS","period":86400,"stat":"Sum"}]],"yAxis":{"left":{"showUnits":false}}}},{"type":"metric","width":6,"height":6,"x":12,"y":0,"properties":{"view":"timeSeries","title":"Remediation Failures by Type","region":"", + { + "Ref": "AWS::Region", + }, + "","metrics":[["ASR","RemediationOutcome","Outcome","LAMBDAERROR",{"label":"LAMBDAERROR","period":86400,"stat":"Sum"}],["ASR","RemediationOutcome","Outcome","REMEDIATIONNOTTACTIVE",{"label":"REMEDIATIONNOTTACTIVE","period":86400,"stat":"Sum"}],["ASR","RemediationOutcome","Outcome","NOREMEDIATION",{"label":"NOREMEDIATION","period":86400,"stat":"Sum"}],["ASR","RemediationOutcome","Outcome","STANDARDNOTENABLED",{"label":"STANDARDNOTENABLED","period":86400,"stat":"Sum"}]],"annotations":{"horizontal":[{"label":"NOREMEDIATION >= 1 for 1 datapoints within 1440 minutes","value":1,"yAxis":"left"}]},"yAxis":{"left":{"showUnits":false}}}},{"type":"text","width":6,"height":6,"x":18,"y":0,"properties":{"markdown":"\\n## Remediation Failures by Type\\nThis widget displays the frequency of different remediation outcomes.\\n\\nIf there is an increase in \`NOREMEDIATION\` results, this indicates that remediations are being attempted for remediations not currently included in ASR. 
You should verify that this is not caused by a modified automatic remediation rule.\\n"}},{"type":"metric","width":6,"height":6,"x":0,"y":6,"properties":{"view":"timeSeries","title":"Remediation Scheduling Queue Length","region":"", + { + "Ref": "AWS::Region", + }, + "","metrics":[["AWS/SQS","ApproximateNumberOfMessagesVisible","QueueName","", + { + "Fn::GetAtt": [ + "SchedulingQueueB533E3CD", + "QueueName", + ], + }, + "",{"label":"Queue Length","period":86400,"stat":"Maximum"}]],"yAxis":{}}},{"type":"metric","width":6,"height":6,"x":6,"y":6,"properties":{"view":"timeSeries","title":"Maximum Remediation Delay","region":"", + { + "Ref": "AWS::Region", + }, + "","metrics":[["ASR","RemediationSchedulingDelay",{"label":"Delay","period":86400,"stat":"Maximum"}]],"yAxis":{}}},{"type":"text","width":6,"height":6,"x":12,"y":6,"properties":{"markdown":"\\n## Remediation Scheduling Widgets\\nThese widgets are related to scheduling of remediations.\\n\\nTriggered remediations are inserted into a queue and a scheduling Lambda picks them up to schedule the remediation execution.\\n\\nThe queue length represents the maximum number of triggered remediations that were waiting to be scheduled during that period.\\n\\nThe maximum delay is how far out, in seconds, that the scheduling Lambda has scheduled a remediation for execution.\\n"}},{"type":"metric","width":6,"height":6,"x":0,"y":12,"properties":{"view":"timeSeries","title":"Runbook Assume Role Failures","region":"", + { + "Ref": "AWS::Region", + }, + "","metrics":[["ASR","AssumeRoleFailure",{"label":"Runbook Assume Role Failures","period":86400,"stat":"Sum"}]],"annotations":{"horizontal":[{"label":"Runbook Assume Role Failures >= 1 for 1 datapoints within 1440 minutes","value":1,"yAxis":"left"}]},"yAxis":{"left":{"showUnits":false}}}},{"type":"text","width":6,"height":6,"x":6,"y":12,"properties":{"markdown":"\\n## Runbook Assume Role Failures\\nThis widget displays the frequency of the remediation lambda failing to assume the 
role necessary to remediate on a different account.\\n\\nThis may indicate that ASR is attempting to remediate on a spoke account that does not have ASR installed.\\n"}}]}", + ], + ], + }, + "DashboardName": "ASR-Remediation-Metrics-Dashboard", + }, + "Type": "AWS::CloudWatch::Dashboard", + }, "SHARRKeyC551FE02": { "Properties": { "Description": "KMS Customer Managed Key that SHARR will use to encrypt data", @@ -768,7 +1195,7 @@ exports[`Test if the Stack has all the resources. 1`] = ` }, "SHARRSNSTopicB940F479": { "Properties": { - "Description": "SNS Topic ARN where SHARR will send status messages. This topic can be useful for driving additional actions, such as email notifications, trouble ticket updates.", + "Description": "SNS Topic ARN where SHARR will send status messages. This topic can be useful for driving additional actions, such as email notifications, trouble ticket updates.", "Name": "/Solutions/SO0111/SNS_Topic_ARN", "Type": "String", "Value": { @@ -780,12 +1207,12 @@ exports[`Test if the Stack has all the resources. 1`] = ` "SHARRSendAnonymousMetricsCDAE439D": { "Properties": { "Description": "Flag to enable or disable sending anonymous metrics.", - "Name": "/Solutions/SO0111/sendAnonymousMetrics", + "Name": "/Solutions/SO0111/sendAnonymizedMetrics", "Type": "String", "Value": { "Fn::FindInMap": [ "mappings", - "sendAnonymousMetrics", + "sendAnonymizedMetrics", "data", ], }, @@ -867,39 +1294,298 @@ exports[`Test if the Stack has all the resources. 
1`] = ` "Resource": "*", }, { - "Action": "kms:*", - "Effect": "Allow", - "Principal": { - "AWS": { - "Fn::Join": [ - "", - [ - "arn:", - { - "Ref": "AWS::Partition", - }, - ":iam::111111111111:root", - ], - ], + "Action": "kms:*", + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition", + }, + ":iam::111111111111:root", + ], + ], + }, + }, + "Resource": "*", + }, + { + "Action": [ + "kms:Decrypt", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*", + ], + "Effect": "Allow", + "Principal": { + "Service": "cloudwatch.amazonaws.com", + }, + "Resource": "*", + }, + ], + "Version": "2012-10-17", + }, + }, + "Type": "AWS::KMS::Key", + "UpdateReplacePolicy": "Retain", + }, + "SHARRversionAC0E4F96": { + "Properties": { + "Description": "Solution version for metrics.", + "Name": "/Solutions/SO0111/version", + "Type": "String", + "Value": "v1.0.0", + }, + "Type": "AWS::SSM::Parameter", + }, + "SchedulingLambdaPolicyBDBE83CB": { + "Metadata": { + "cdk_nag": { + "rules_to_suppress": [ + { + "id": "AwsSolutions-IAM5", + "reason": "Resource * is required for CloudWatch Logs used by the Scheduling Lambda function.", + }, + ], + }, + }, + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": [ + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:PutLogEvents", + ], + "Effect": "Allow", + "Resource": "*", + }, + { + "Action": [ + "ssm:GetParameter", + "ssm:PutParameter", + ], + "Effect": "Allow", + "Resource": { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition", + }, + ":ssm:eu-west-1:111111111111:parameter/Solutions/SO0111/*", + ], + ], + }, + }, + { + "Action": "cloudwatch:PutMetricData", + "Effect": "Allow", + "Resource": "*", + }, + ], + "Version": "2012-10-17", + }, + "PolicyName": "SO0111-SHARR_Scheduling_Lambda", + "Roles": [ + { + "Ref": "SchedulingLambdaRoleAB00F55C", + }, + ], + }, + "Type": "AWS::IAM::Policy", + }, + "SchedulingLambdaRoleAB00F55C": { + 
"Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "lambda.amazonaws.com", + }, + }, + ], + "Version": "2012-10-17", + }, + "Description": "Lambda role to schedule remediations that are sent to SQS through the orchestrator", + }, + "Type": "AWS::IAM::Role", + }, + "SchedulingLambdaRoleDefaultPolicy73C1B49B": { + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": [ + "states:SendTaskSuccess", + "states:SendTaskFailure", + "states:SendTaskHeartbeat", + ], + "Effect": "Allow", + "Resource": { + "Ref": "orchestratorStateMachine77C3F8FB", + }, + }, + { + "Action": [ + "dynamodb:BatchGetItem", + "dynamodb:GetRecords", + "dynamodb:GetShardIterator", + "dynamodb:Query", + "dynamodb:GetItem", + "dynamodb:Scan", + "dynamodb:ConditionCheckItem", + "dynamodb:BatchWriteItem", + "dynamodb:PutItem", + "dynamodb:UpdateItem", + "dynamodb:DeleteItem", + "dynamodb:DescribeTable", + ], + "Effect": "Allow", + "Resource": [ + { + "Fn::GetAtt": [ + "SchedulingTable1EC09B43", + "Arn", + ], + }, + { + "Ref": "AWS::NoValue", + }, + ], + }, + { + "Action": [ + "sqs:ReceiveMessage", + "sqs:ChangeMessageVisibility", + "sqs:GetQueueUrl", + "sqs:DeleteMessage", + "sqs:GetQueueAttributes", + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "SchedulingQueueB533E3CD", + "Arn", + ], + }, + }, + { + "Action": "kms:Decrypt", + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "SHARRkeyE6BD0F56", + "Arn", + ], + }, + }, + ], + "Version": "2012-10-17", + }, + "PolicyName": "SchedulingLambdaRoleDefaultPolicy73C1B49B", + "Roles": [ + { + "Ref": "SchedulingLambdaRoleAB00F55C", + }, + ], + }, + "Type": "AWS::IAM::Policy", + }, + "SchedulingQueueB533E3CD": { + "DeletionPolicy": "Delete", + "Properties": { + "KmsMasterKeyId": { + "Fn::GetAtt": [ + "SHARRkeyE6BD0F56", + "Arn", + ], + }, + "RedrivePolicy": { + "deadLetterTargetArn": { + "Fn::GetAtt": [ + 
"deadLetterSchedulingQueue9BCE9EA8", + "Arn", + ], + }, + "maxReceiveCount": 10, + }, + }, + "Type": "AWS::SQS::Queue", + "UpdateReplacePolicy": "Delete", + }, + "SchedulingQueuePolicy36FAAC29": { + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": "sqs:*", + "Condition": { + "Bool": { + "aws:SecureTransport": "false", }, }, - "Resource": "*", + "Effect": "Deny", + "Principal": { + "AWS": "*", + }, + "Resource": { + "Fn::GetAtt": [ + "SchedulingQueueB533E3CD", + "Arn", + ], + }, }, ], "Version": "2012-10-17", }, + "Queues": [ + { + "Ref": "SchedulingQueueB533E3CD", + }, + ], }, - "Type": "AWS::KMS::Key", - "UpdateReplacePolicy": "Retain", + "Type": "AWS::SQS::QueuePolicy", }, - "SHARRversionAC0E4F96": { + "SchedulingTable1EC09B43": { + "DeletionPolicy": "Retain", "Properties": { - "Description": "Solution version for metrics.", - "Name": "/Solutions/SO0111/version", - "Type": "String", - "Value": "v1.0.0", + "AttributeDefinitions": [ + { + "AttributeName": "AccountID-Region", + "AttributeType": "S", + }, + ], + "KeySchema": [ + { + "AttributeName": "AccountID-Region", + "KeyType": "HASH", + }, + ], + "PointInTimeRecoverySpecification": { + "PointInTimeRecoveryEnabled": true, + }, + "ProvisionedThroughput": { + "ReadCapacityUnits": 5, + "WriteCapacityUnits": 5, + }, + "SSESpecification": { + "SSEEnabled": true, + }, + "TimeToLiveSpecification": { + "AttributeName": "TTL", + "Enabled": true, + }, }, - "Type": "AWS::SSM::Parameter", + "Type": "AWS::DynamoDB::Table", + "UpdateReplacePolicy": "Retain", }, "SharrLambdaLayer5BF8F147": { "Properties": { @@ -915,19 +1601,54 @@ exports[`Test if the Stack has all the resources. 1`] = ` }, "Type": "AWS::Lambda::LayerVersion", }, + "StateMachineExecutions0993FE1A": { + "Condition": "isUsingCloudWatchMetricsAlarms", + "Properties": { + "AlarmActions": [ + { + "Ref": "ASRAlarmTopic7CEFBDF9", + }, + ], + "AlarmDescription": "Number of executed remediations is higher than normal. 
Check other metrics.", + "AlarmName": "ASR-StateMachineExecutions", + "ComparisonOperator": "GreaterThanOrEqualToThreshold", + "DatapointsToAlarm": 1, + "EvaluationPeriods": 1, + "Metrics": [ + { + "Id": "m1", + "Label": "Remediations started", + "MetricStat": { + "Metric": { + "Dimensions": [ + { + "Name": "StateMachineArn", + "Value": { + "Ref": "orchestratorStateMachine77C3F8FB", + }, + }, + ], + "MetricName": "ExecutionsStarted", + "Namespace": "AWS/States", + }, + "Period": 86400, + "Stat": "Sum", + }, + "ReturnData": true, + }, + ], + "Threshold": { + "Ref": "StateMachineExecutionsAlarmThreshold", + }, + "TreatMissingData": "notBreaching", + }, + "Type": "AWS::CloudWatch::Alarm", + }, "checkSSMDocState06AC440F": { "DependsOn": [ "orchestratorRole46A9F242", ], "Metadata": { - "cdk_nag": { - "rules_to_suppress": [ - { - "id": "AwsSolutions-L1", - "reason": "Will upgrade in next release to prioritize patch", - }, - ], - }, "cfn_nag": { "rules_to_suppress": [ { @@ -1084,19 +1805,57 @@ exports[`Test if the Stack has all the resources. 
1`] = ` }, "Type": "AWS::IAM::Role", }, + "deadLetterSchedulingQueue9BCE9EA8": { + "DeletionPolicy": "Delete", + "Properties": { + "KmsMasterKeyId": { + "Fn::GetAtt": [ + "SHARRkeyE6BD0F56", + "Arn", + ], + }, + }, + "Type": "AWS::SQS::Queue", + "UpdateReplacePolicy": "Delete", + }, + "deadLetterSchedulingQueuePolicy87B26533": { + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": "sqs:*", + "Condition": { + "Bool": { + "aws:SecureTransport": "false", + }, + }, + "Effect": "Deny", + "Principal": { + "AWS": "*", + }, + "Resource": { + "Fn::GetAtt": [ + "deadLetterSchedulingQueue9BCE9EA8", + "Arn", + ], + }, + }, + ], + "Version": "2012-10-17", + }, + "Queues": [ + { + "Ref": "deadLetterSchedulingQueue9BCE9EA8", + }, + ], + }, + "Type": "AWS::SQS::QueuePolicy", + }, "execAutomation5D89E251": { "DependsOn": [ "orchestratorRole46A9F242", ], "Metadata": { - "cdk_nag": { - "rules_to_suppress": [ - { - "id": "AwsSolutions-L1", - "reason": "Will upgrade in next release to prioritize patch", - }, - ], - }, "cfn_nag": { "rules_to_suppress": [ { @@ -1154,14 +1913,6 @@ exports[`Test if the Stack has all the resources. 1`] = ` "orchestratorRole46A9F242", ], "Metadata": { - "cdk_nag": { - "rules_to_suppress": [ - { - "id": "AwsSolutions-L1", - "reason": "Will upgrade in next release to prioritize patch", - }, - ], - }, "cfn_nag": { "rules_to_suppress": [ { @@ -1220,14 +1971,6 @@ exports[`Test if the Stack has all the resources. 1`] = ` "orchestratorRole46A9F242", ], "Metadata": { - "cdk_nag": { - "rules_to_suppress": [ - { - "id": "AwsSolutions-L1", - "reason": "Will upgrade in next release to prioritize patch", - }, - ], - }, "cfn_nag": { "rules_to_suppress": [ { @@ -1369,6 +2112,11 @@ exports[`Test if the Stack has all the resources. 1`] = ` ], }, }, + { + "Action": "cloudwatch:PutMetricData", + "Effect": "Allow", + "Resource": "*", + }, ], "Version": "2012-10-17", }, @@ -1742,16 +2490,34 @@ exports[`Test if the Stack has all the resources. 
1`] = ` "kms:GenerateDataKey", ], "Effect": "Allow", - "Resource": { - "Fn::Join": [ - "", - [ - "arn:", - { - "Ref": "AWS::Partition", - }, - ":kms:eu-west-1:111111111111:alias/SO0111-SHARR-Key", + "Resource": [ + { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition", + }, + ":kms:eu-west-1:111111111111:alias/SO0111-SHARR-Key", + ], + ], + }, + { + "Fn::GetAtt": [ + "SHARRKeyC551FE02", + "Value", ], + }, + ], + }, + { + "Action": "sqs:SendMessage", + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "SchedulingQueueB533E3CD", + "Arn", ], }, }, @@ -1841,7 +2607,7 @@ exports[`Test if the Stack has all the resources. 1`] = ` "Arn", ], }, - "","Payload.$":"$"}},"Automation Document is not Active":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('Automation Document ({}) is not active ({}) in the member account({}).', $.AutomationDocId, $.AutomationDocument.DocState, $.Finding.AwsAccountId)","State.$":"States.Format('REMEDIATIONNOTACTIVE')","updateSecHub":"yes"},"EventType.$":"$.EventType","Finding.$":"$.Finding","AccountId.$":"$.AutomationDocument.AccountId","AutomationDocId.$":"$.AutomationDocument.AutomationDocId","RemediationRole.$":"$.AutomationDocument.RemediationRole","ControlId.$":"$.AutomationDocument.ControlId","SecurityStandard.$":"$.AutomationDocument.SecurityStandard","SecurityStandardVersion.$":"$.AutomationDocument.SecurityStandardVersion"},"Next":"notify"},"Automation Doc Active?":{"Type":"Choice","Choices":[{"Variable":"$.AutomationDocument.DocState","StringEquals":"ACTIVE","Next":"Execute Remediation"},{"Variable":"$.AutomationDocument.DocState","StringEquals":"NOTACTIVE","Next":"Automation Document is not Active"},{"Variable":"$.AutomationDocument.DocState","StringEquals":"NOTENABLED","Next":"Security Standard is not enabled"},{"Variable":"$.AutomationDocument.DocState","StringEquals":"NOTFOUND","Next":"No Remediation for Control"}],"Default":"check_ssm_doc_state Error"},"Get Automation Document 
State":{"Next":"Automation Doc Active?","Retry":[{"ErrorEquals":["Lambda.ServiceException","Lambda.AWSLambdaException","Lambda.SdkClientException"],"IntervalSeconds":2,"MaxAttempts":6,"BackoffRate":2}],"Catch":[{"ErrorEquals":["States.ALL"],"Next":"Orchestrator Failed"}],"Type":"Task","Comment":"Get the status of the remediation automation document in the target account","TimeoutSeconds":60,"ResultPath":"$.AutomationDocument","ResultSelector":{"DocState.$":"$.Payload.status","Message.$":"$.Payload.message","SecurityStandard.$":"$.Payload.securitystandard","SecurityStandardVersion.$":"$.Payload.securitystandardversion","SecurityStandardSupported.$":"$.Payload.standardsupported","ControlId.$":"$.Payload.controlid","AccountId.$":"$.Payload.accountid","RemediationRole.$":"$.Payload.remediationrole","AutomationDocId.$":"$.Payload.automationdocid","ResourceRegion.$":"$.Payload.resourceregion"},"Resource":"arn:", + "","Payload.$":"$"}},"Automation Document is not Active":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('Automation Document ({}) is not active ({}) in the member account({}).', $.AutomationDocId, $.AutomationDocument.DocState, $.Finding.AwsAccountId)","State.$":"States.Format('REMEDIATIONNOTACTIVE')","updateSecHub":"yes"},"EventType.$":"$.EventType","Finding.$":"$.Finding","AccountId.$":"$.AutomationDocument.AccountId","AutomationDocId.$":"$.AutomationDocument.AutomationDocId","RemediationRole.$":"$.AutomationDocument.RemediationRole","ControlId.$":"$.AutomationDocument.ControlId","SecurityStandard.$":"$.AutomationDocument.SecurityStandard","SecurityStandardVersion.$":"$.AutomationDocument.SecurityStandardVersion"},"Next":"notify"},"Automation Doc Active?":{"Type":"Choice","Choices":[{"Variable":"$.AutomationDocument.DocState","StringEquals":"ACTIVE","Next":"Send Task Token"},{"Variable":"$.AutomationDocument.DocState","StringEquals":"NOTACTIVE","Next":"Automation Document is not 
Active"},{"Variable":"$.AutomationDocument.DocState","StringEquals":"NOTENABLED","Next":"Security Standard is not enabled"},{"Variable":"$.AutomationDocument.DocState","StringEquals":"NOTFOUND","Next":"No Remediation for Control"}],"Default":"check_ssm_doc_state Error"},"Get Automation Document State":{"Next":"Automation Doc Active?","Retry":[{"ErrorEquals":["Lambda.ServiceException","Lambda.AWSLambdaException","Lambda.SdkClientException"],"IntervalSeconds":2,"MaxAttempts":6,"BackoffRate":2}],"Catch":[{"ErrorEquals":["States.ALL"],"Next":"Orchestrator Failed"}],"Type":"Task","Comment":"Get the status of the remediation automation document in the target account","TimeoutSeconds":60,"ResultPath":"$.AutomationDocument","ResultSelector":{"DocState.$":"$.Payload.status","Message.$":"$.Payload.message","SecurityStandard.$":"$.Payload.securitystandard","SecurityStandardVersion.$":"$.Payload.securitystandardversion","SecurityStandardSupported.$":"$.Payload.standardsupported","ControlId.$":"$.Payload.controlid","AccountId.$":"$.Payload.accountid","RemediationRole.$":"$.Payload.remediationrole","AutomationDocId.$":"$.Payload.automationdocid","ResourceRegion.$":"$.Payload.resourceregion"},"Resource":"arn:", { "Ref": "AWS::Partition", }, @@ -1863,7 +2629,15 @@ exports[`Test if the Stack has all the resources. 
1`] = ` "Arn", ], }, - "","Payload.$":"$"}},"Orchestrator Failed":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('Orchestrator failed: {}', $.Error)","State.$":"States.Format('LAMBDAERROR')","Details.$":"States.Format('Cause: {}', $.Cause)"},"Payload.$":"$"},"Next":"notify"},"Execute Remediation":{"Next":"Remediation Queued","Retry":[{"ErrorEquals":["Lambda.ServiceException","Lambda.AWSLambdaException","Lambda.SdkClientException"],"IntervalSeconds":2,"MaxAttempts":6,"BackoffRate":2}],"Catch":[{"ErrorEquals":["States.ALL"],"Next":"Orchestrator Failed"}],"Type":"Task","Comment":"Execute the SSM Automation Document in the target account","TimeoutSeconds":300,"HeartbeatSeconds":60,"ResultPath":"$.SSMExecution","ResultSelector":{"ExecState.$":"$.Payload.status","Message.$":"$.Payload.message","ExecId.$":"$.Payload.executionid","Account.$":"$.Payload.executionaccount","Region.$":"$.Payload.executionregion"},"Resource":"arn:", + "","Payload.$":"$"}},"Orchestrator Failed":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('Orchestrator failed: {}', $.Error)","State.$":"States.Format('LAMBDAERROR')","Details.$":"States.Format('Cause: {}', $.Cause)"},"Payload.$":"$"},"Next":"notify"},"Send Task Token":{"Next":"Remediation Wait","Catch":[{"ErrorEquals":["States.ALL"],"Next":"Orchestrator Failed"}],"Type":"Task","Comment":"Send Task Token to SQS Queue for Remediation Scheduling","Resource":"arn:", + { + "Ref": "AWS::Partition", + }, + ":states:::sqs:sendMessage.waitForTaskToken","Parameters":{"QueueUrl":"", + { + "Ref": "SchedulingQueueB533E3CD", + }, + "","MessageBody":{"RemediationDetails.$":"$","TaskToken.$":"$$.Task.Token","AccountId.$":"$.AutomationDocument.AccountId","ResourceRegion.$":"$.AutomationDocument.ResourceRegion","executionId.$":"$$.Execution.Id"}}},"Remediation Wait":{"Type":"Wait","Comment":"Waiting for remediation","TimestampPath":"$.PlannedTimestamp","Next":"Execute Remediation"},"Execute 
Remediation":{"Next":"Remediation Queued","Retry":[{"ErrorEquals":["Lambda.ServiceException","Lambda.AWSLambdaException","Lambda.SdkClientException"],"IntervalSeconds":2,"MaxAttempts":6,"BackoffRate":2}],"Catch":[{"ErrorEquals":["States.ALL"],"Next":"Orchestrator Failed"}],"Type":"Task","Comment":"Execute the SSM Automation Document in the target account","TimeoutSeconds":300,"HeartbeatSeconds":60,"ResultPath":"$.SSMExecution","ResultSelector":{"ExecState.$":"$.Payload.status","Message.$":"$.Payload.message","ExecId.$":"$.Payload.executionid","Account.$":"$.Payload.executionaccount","Region.$":"$.Payload.executionregion"},"Resource":"arn:", { "Ref": "AWS::Partition", }, @@ -1896,7 +2670,7 @@ exports[`Test if the Stack has all the resources. 1`] = ` "Arn", ], }, - "","Payload.$":"$"}},"Wait for Remediation":{"Type":"Wait","Seconds":15,"Next":"execMonitor"},"Remediation completed?":{"Type":"Choice","Choices":[{"Variable":"$.Remediation.RemediationState","StringEquals":"Failed","Next":"Remediation Failed"},{"Variable":"$.Remediation.ExecState","StringEquals":"Success","Next":"Remediation Succeeded"},{"Variable":"$.Remediation.ExecState","StringEquals":"TimedOut","Next":"Remediation Failed"},{"Variable":"$.Remediation.ExecState","StringEquals":"Cancelling","Next":"Remediation Failed"},{"Variable":"$.Remediation.ExecState","StringEquals":"Cancelled","Next":"Remediation Failed"},{"Variable":"$.Remediation.ExecState","StringEquals":"Failed","Next":"Remediation Failed"}],"Default":"Wait for Remediation"},"Remediation Failed":{"Type":"Pass","Comment":"Set parameters for notification","Parameters":{"EventType.$":"$.EventType","Finding.$":"$.Finding","SSMExecution.$":"$.SSMExecution","AutomationDocument.$":"$.AutomationDocument","Notification":{"Message.$":"States.Format('Remediation failed for {} control {} in account {}: {}', $.AutomationDocument.SecurityStandard, $.AutomationDocument.ControlId, $.AutomationDocument.AccountId, 
$.Remediation.Message)","State.$":"$.Remediation.ExecState","Details.$":"$.Remediation.LogData","ExecId.$":"$.Remediation.ExecId","AffectedObject.$":"$.Remediation.AffectedObject"}},"Next":"notify"},"Remediation Succeeded":{"Type":"Pass","Comment":"Set parameters for notification","Parameters":{"EventType.$":"$.EventType","Finding.$":"$.Finding","AccountId.$":"$.AutomationDocument.AccountId","AutomationDocId.$":"$.AutomationDocument.AutomationDocId","RemediationRole.$":"$.AutomationDocument.RemediationRole","ControlId.$":"$.AutomationDocument.ControlId","SecurityStandard.$":"$.AutomationDocument.SecurityStandard","SecurityStandardVersion.$":"$.AutomationDocument.SecurityStandardVersion","Notification":{"Message.$":"States.Format('Remediation succeeded for {} control {} in account {}: {}', $.AutomationDocument.SecurityStandard, $.AutomationDocument.ControlId, $.AutomationDocument.AccountId, $.Remediation.Message)","State.$":"States.Format('SUCCESS')","Details.$":"$.Remediation.LogData","ExecId.$":"$.Remediation.ExecId","AffectedObject.$":"$.Remediation.AffectedObject"}},"Next":"notify"},"check_ssm_doc_state Error":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('check_ssm_doc_state returned an error: {}', $.AutomationDocument.Message)","State.$":"States.Format('LAMBDAERROR')"},"EventType.$":"$.EventType","Finding.$":"$.Finding"},"Next":"notify"},"Security Standard is not enabled":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('Security Standard ({}) v{} is not enabled.', $.AutomationDocument.SecurityStandard, 
$.AutomationDocument.SecurityStandardVersion)","State.$":"States.Format('STANDARDNOTENABLED')","updateSecHub":"yes"},"EventType.$":"$.EventType","Finding.$":"$.Finding","AccountId.$":"$.AutomationDocument.AccountId","AutomationDocId.$":"$.AutomationDocument.AutomationDocId","RemediationRole.$":"$.AutomationDocument.RemediationRole","ControlId.$":"$.AutomationDocument.ControlId","SecurityStandard.$":"$.AutomationDocument.SecurityStandard","SecurityStandardVersion.$":"$.AutomationDocument.SecurityStandardVersion"},"Next":"notify"},"No Remediation for Control":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('Security Standard {} v{} control {} has no automated remediation.', $.AutomationDocument.SecurityStandard, $.AutomationDocument.SecurityStandardVersion, $.AutomationDocument.ControlId)","State.$":"States.Format('NOREMEDIATION')","updateSecHub":"yes"},"EventType.$":"$.EventType","Finding.$":"$.Finding","AccountId.$":"$.AutomationDocument.AccountId","AutomationDocId.$":"$.AutomationDocument.AutomationDocId","RemediationRole.$":"$.AutomationDocument.RemediationRole","ControlId.$":"$.AutomationDocument.ControlId","SecurityStandard.$":"$.AutomationDocument.SecurityStandard","SecurityStandardVersion.$":"$.AutomationDocument.SecurityStandardVersion"},"Next":"notify"}}},"ItemsPath":"$.Findings"},"EOJ":{"Type":"Pass","Comment":"END-OF-JOB","End":true}},"TimeoutSeconds":900}", + "","Payload.$":"$"}},"Wait for Remediation":{"Type":"Wait","Seconds":15,"Next":"execMonitor"},"Remediation completed?":{"Type":"Choice","Choices":[{"Variable":"$.Remediation.RemediationState","StringEquals":"Failed","Next":"Remediation Failed"},{"Variable":"$.Remediation.ExecState","StringEquals":"Success","Next":"Remediation Succeeded"},{"Variable":"$.Remediation.ExecState","StringEquals":"TimedOut","Next":"Remediation Failed"},{"Variable":"$.Remediation.ExecState","StringEquals":"Cancelling","Next":"Remediation 
Failed"},{"Variable":"$.Remediation.ExecState","StringEquals":"Cancelled","Next":"Remediation Failed"},{"Variable":"$.Remediation.ExecState","StringEquals":"Failed","Next":"Remediation Failed"}],"Default":"Wait for Remediation"},"Remediation Failed":{"Type":"Pass","Comment":"Set parameters for notification","Parameters":{"EventType.$":"$.EventType","Finding.$":"$.Finding","SSMExecution.$":"$.SSMExecution","AutomationDocument.$":"$.AutomationDocument","Notification":{"Message.$":"States.Format('Remediation failed for {} control {} in account {}: {}', $.AutomationDocument.SecurityStandard, $.AutomationDocument.ControlId, $.AutomationDocument.AccountId, $.Remediation.Message)","State.$":"$.Remediation.ExecState","Details.$":"$.Remediation.LogData","ExecId.$":"$.Remediation.ExecId","AffectedObject.$":"$.Remediation.AffectedObject"}},"Next":"notify"},"Remediation Succeeded":{"Type":"Pass","Comment":"Set parameters for notification","Parameters":{"EventType.$":"$.EventType","Finding.$":"$.Finding","AccountId.$":"$.AutomationDocument.AccountId","AutomationDocId.$":"$.AutomationDocument.AutomationDocId","RemediationRole.$":"$.AutomationDocument.RemediationRole","ControlId.$":"$.AutomationDocument.ControlId","SecurityStandard.$":"$.AutomationDocument.SecurityStandard","SecurityStandardVersion.$":"$.AutomationDocument.SecurityStandardVersion","Notification":{"Message.$":"States.Format('Remediation succeeded for {} control {} in account {}: {}', $.AutomationDocument.SecurityStandard, $.AutomationDocument.ControlId, $.AutomationDocument.AccountId, $.Remediation.Message)","State.$":"States.Format('SUCCESS')","Details.$":"$.Remediation.LogData","ExecId.$":"$.Remediation.ExecId","AffectedObject.$":"$.Remediation.AffectedObject"}},"Next":"notify"},"check_ssm_doc_state Error":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('check_ssm_doc_state returned an error: {}', 
$.AutomationDocument.Message)","State.$":"States.Format('LAMBDAERROR')"},"EventType.$":"$.EventType","Finding.$":"$.Finding"},"Next":"notify"},"Security Standard is not enabled":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('Security Standard ({}) v{} is not enabled.', $.AutomationDocument.SecurityStandard, $.AutomationDocument.SecurityStandardVersion)","State.$":"States.Format('STANDARDNOTENABLED')","updateSecHub":"yes"},"EventType.$":"$.EventType","Finding.$":"$.Finding","AccountId.$":"$.AutomationDocument.AccountId","AutomationDocId.$":"$.AutomationDocument.AutomationDocId","RemediationRole.$":"$.AutomationDocument.RemediationRole","ControlId.$":"$.AutomationDocument.ControlId","SecurityStandard.$":"$.AutomationDocument.SecurityStandard","SecurityStandardVersion.$":"$.AutomationDocument.SecurityStandardVersion"},"Next":"notify"},"No Remediation for Control":{"Type":"Pass","Parameters":{"Notification":{"Message.$":"States.Format('Security Standard {} v{} control {} has no automated remediation.', $.AutomationDocument.SecurityStandard, $.AutomationDocument.SecurityStandardVersion, $.AutomationDocument.ControlId)","State.$":"States.Format('NOREMEDIATION')","updateSecHub":"yes"},"EventType.$":"$.EventType","Finding.$":"$.Finding","AccountId.$":"$.AutomationDocument.AccountId","AutomationDocId.$":"$.AutomationDocument.AutomationDocId","RemediationRole.$":"$.AutomationDocument.RemediationRole","ControlId.$":"$.AutomationDocument.ControlId","SecurityStandard.$":"$.AutomationDocument.SecurityStandard","SecurityStandardVersion.$":"$.AutomationDocument.SecurityStandardVersion"},"Next":"notify"}}},"ItemsPath":"$.Findings"},"EOJ":{"Type":"Pass","Comment":"END-OF-JOB","End":true}},"TimeoutSeconds":5400}", ], ], }, @@ -1933,19 +2707,65 @@ exports[`Test if the Stack has all the resources. 
1`] = ` "Type": "AWS::StepFunctions::StateMachine", "UpdateReplacePolicy": "Delete", }, - "sendNotifications1367638A": { + "schedulingLambdaTrigger24179157": { "DependsOn": [ - "notifyRole40298120", + "SchedulingLambdaRoleDefaultPolicy73C1B49B", + "SchedulingLambdaRoleAB00F55C", ], - "Metadata": { - "cdk_nag": { - "rules_to_suppress": [ - { - "id": "AwsSolutions-L1", - "reason": "Will upgrade in next release to prioritize patch", + "Properties": { + "Code": { + "S3Bucket": "solutions-eu-west-1", + "S3Key": "aws-security-hub-automated-response-and-remediation/v1.0.0/lambda/schedule_remediation.py.zip", + }, + "Description": "SO0111 ASR function that schedules remediations in member accounts", + "Environment": { + "Variables": { + "RemediationWaitTime": "3", + "SchedulingTableName": { + "Ref": "SchedulingTable1EC09B43", }, + }, + }, + "FunctionName": "SO0111-SHARR-schedulingLambdaTrigger", + "Handler": "schedule_remediation.lambda_handler", + "Layers": [ + { + "Ref": "SharrLambdaLayer5BF8F147", + }, + ], + "MemorySize": 128, + "ReservedConcurrentExecutions": 1, + "Role": { + "Fn::GetAtt": [ + "SchedulingLambdaRoleAB00F55C", + "Arn", + ], + }, + "Runtime": "python3.9", + "Timeout": 10, + }, + "Type": "AWS::Lambda::Function", + }, + "schedulingLambdaTriggerSqsEventSourcestackSchedulingQueue75049B5469A066D6": { + "Properties": { + "BatchSize": 1, + "EventSourceArn": { + "Fn::GetAtt": [ + "SchedulingQueueB533E3CD", + "Arn", ], }, + "FunctionName": { + "Ref": "schedulingLambdaTrigger24179157", + }, + }, + "Type": "AWS::Lambda::EventSourceMapping", + }, + "sendNotifications1367638A": { + "DependsOn": [ + "notifyRole40298120", + ], + "Metadata": { "cfn_nag": { "rules_to_suppress": [ { @@ -1998,6 +2818,77 @@ exports[`Test if the Stack has all the resources. 
1`] = ` }, "Type": "AWS::Lambda::Function", }, + "stackRole9B4D53CC": { + "Metadata": { + "cdk_nag": { + "rules_to_suppress": [ + { + "id": "AwsSolutions-IAM5", + "reason": "Resource * is needed for CloudWatch Logs policies used on Lambda functions.", + }, + ], + }, + }, + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "lambda.amazonaws.com", + }, + }, + ], + "Version": "2012-10-17", + }, + "Policies": [ + { + "PolicyDocument": { + "Statement": [ + { + "Action": "cloudwatch:PutMetricData", + "Effect": "Allow", + "Resource": "*", + }, + { + "Action": [ + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:PutLogEvents", + ], + "Effect": "Allow", + "Resource": "*", + }, + { + "Action": [ + "ssm:GetParameter", + "ssm:GetParameters", + "ssm:PutParameter", + ], + "Effect": "Allow", + "Resource": { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition", + }, + ":ssm:*:111111111111:parameter/Solutions/SO0111/*", + ], + ], + }, + }, + ], + "Version": "2012-10-17", + }, + "PolicyName": "LambdaPolicy", + }, + ], + }, + "Type": "AWS::IAM::Role", + }, }, } `; diff --git a/source/test/orchestrator.test.ts b/source/test/orchestrator.test.ts index 2da05cee..be1773b9 100644 --- a/source/test/orchestrator.test.ts +++ b/source/test/orchestrator.test.ts @@ -7,6 +7,7 @@ import { StringParameter } from 'aws-cdk-lib/aws-ssm'; import { Template } from 'aws-cdk-lib/assertions'; import { AwsSolutionsChecks } from 'cdk-nag'; import { OrchestratorConstruct } from '../lib/common-orchestrator-construct'; +import * as sqs from 'aws-cdk-lib/aws-sqs'; test('test App Orchestrator Construct', () => { const app = new App(); @@ -43,6 +44,12 @@ test('test App Orchestrator Construct', () => { stringValue: kmsKey.keyArn, }); + const schedulingQueue = new sqs.Queue(stack, 'SchedulingQueue', { + encryption: sqs.QueueEncryption.KMS, + enforceSSL: true, + encryptionMasterKey: kmsKey, + }); 
+ new OrchestratorConstruct(stack, 'Orchestrator', { roleArn: 'arn:aws-test:iam::111122223333:role/TestRole', ssmDocStateLambda: 'arn:aws:lambda:us-east-1:111122223333:function/foobar', @@ -55,6 +62,7 @@ test('test App Orchestrator Construct', () => { solutionVersion: '1.1.1', orchLogGroup: 'ORCH_LOG_GROUP', kmsKeyParm: kmsKeyParm, + sqsQueue: schedulingQueue, }); Aspects.of(app).add(new AwsSolutionsChecks({ verbose: true })); expect(Template.fromStack(stack)).toMatchSnapshot(); diff --git a/source/test/regex_registry.ts b/source/test/regex_registry.ts index ccd3a4bb..2ecebeb1 100644 --- a/source/test/regex_registry.ts +++ b/source/test/regex_registry.ts @@ -130,23 +130,23 @@ export function getRegexRegistry(): RegexRegistry { 'arn:aws:iam::111111111111:role/', 'art:aws:iam::111111111111:role/standard', 'arn:aws-fictional-partition:iam::111111111111:role/otherwise-valid', - ] - ) + ], + ), ); registry.addCase( - new RegexTestCase(String.raw`^arn:(aws[a-zA-Z-]*)?:iam::\d{12}:role/[a-zA-Z0-9+=,.@_/-]+$`, 'IAM Role ARN', [], []) + new RegexTestCase(String.raw`^arn:(aws[a-zA-Z-]*)?:iam::\d{12}:role/[a-zA-Z0-9+=,.@_/-]+$`, 'IAM Role ARN', [], []), ); registry.addCase( - new RegexTestCase(String.raw`^[\w+=,.@-]{1,64}$`, 'IAM User Name', ['a-valid-user-name'], ['an invalid username']) + new RegexTestCase(String.raw`^[\w+=,.@-]{1,64}$`, 'IAM User Name', ['a-valid-user-name'], ['an invalid username']), ); const kmsKeyArnTestCase: RegexTestCase = new RegexTestCase( String.raw`^arn:(?:aws|aws-us-gov|aws-cn):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:(?:(?:alias/[A-Za-z0-9/-_])|(?:key/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})))$`, 'KMS Key ARN as Key ID or Alias, no match', [], - [] + [], ); // TODO: ES regex engine doesn't support case-insensitive non-capturing groups, just include capitals kmsKeyArnTestCase.disable(); @@ -156,7 +156,7 @@ export function getRegexRegistry(): RegexRegistry { 
String.raw`^(?:arn:(?:aws|aws-us-gov|aws-cn):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:)?(?:(?:alias/[A-Za-z0-9/_-]+)|(?:key/(?i:[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12})))$`, 'KMS Key ARN or Key ID or Alias, no match', ['alias/aws/rds'], - [] + [], ); // TODO: ES regex engine doesn't support case-insensitive non-capturing groups, just include capitals kmsKeyArnOrIdOrAlias.disable(); @@ -167,8 +167,8 @@ export function getRegexRegistry(): RegexRegistry { String.raw`^[\w+=,.@-]+$`, 'IAM Role name, no match', ['SO0111-RemediationRoleName'], - ['Not:A:IAM:Role:Name'] - ) + ['Not:A:IAM:Role:Name'], + ), ); registry.addCase(new RegexTestCase(String.raw`^[\w+=,.@/-]+$`, 'IAM Role name with path, no match', [], [])); @@ -182,8 +182,8 @@ export function getRegexRegistry(): RegexRegistry { String.raw`^$|^[a-zA-Z0-9/_-]{1,256}$`, 'KMS Key Alias, no match', ['default-s3-encryption', 'sharr-test-key-alias'], - ['asdf,vvv'] - ) + ['asdf,vvv'], + ), ); registry.addCase(new RegexTestCase(String.raw`[a-z0-9-]{1,2048}`, 'KMS Key ID', [], [])); @@ -193,8 +193,8 @@ export function getRegexRegistry(): RegexRegistry { String.raw`(?=^.{3,63}$)(?!^(\d+\.)+\d+$)(^(([a-z0-9]|[a-z0-9][a-z0-9\-]*[a-z0-9])\.)*([a-z0-9]|[a-z0-9][a-z0-9\-]*[a-z0-9])$)`, 'S3 Bucket Name', ['replace-this-with-s3-bucket-name-for-audit-logging'], - [] - ) + [], + ), ); registry.addCase(new RegexTestCase(String.raw`^[A-Za-z0-9][A-Za-z0-9\-_]{1,254}$`, 'CodeBuild project name', [], [])); @@ -229,21 +229,21 @@ export function getRegexRegistry(): RegexRegistry { 'prefix-db-0123456789ABCDEFGHIJKLMNOP', 'db-0123456789ABCDEFGHIJKLMNOP-suffix', 'db-0123456789abcdefghijklmnop', - ] - ) + ], + ), ); registry.addCase( - new RegexTestCase(String.raw`^[a-zA-Z](?:[0-9a-zA-Z]+[-]{1})*[0-9a-zA-Z]{1,}$`, 'RDS DB Snapshot ID', [], []) + new RegexTestCase(String.raw`^[a-zA-Z](?:[0-9a-zA-Z]+[-]{1})*[0-9a-zA-Z]{1,}$`, 'RDS DB Snapshot ID', [], []), ); registry.addCase( new RegexTestCase( - 
String.raw`^(?:rds:)?(?!.*--.*)(?!.*-$)[a-zA-Z][a-zA-Z0-9-]{0,254}$`, + String.raw`^(?:rds:|awsbackup:)?(?!.*--.*)(?!.*-$)[a-zA-Z][a-zA-Z0-9-]{0,254}$`, 'RDS DB Snapshot Name', [], - [] - ) + [], + ), ); registry.addCase( @@ -251,8 +251,8 @@ export function getRegexRegistry(): RegexRegistry { String.raw`^(?!.*--.*)(?!.*-$)[a-zA-Z][a-zA-Z0-9-]{0,254}$`, 'RDS DB Snapshot Name, no automated snapshots', [], - [] - ) + [], + ), ); registry.addCase( @@ -260,8 +260,8 @@ export function getRegexRegistry(): RegexRegistry { String.raw`^(?!.*--)[a-zA-Z][a-zA-Z0-9.,$;-]{0,58}[^-]$`, 'RDS DB Instance Identifier', ['database-1'], - ['not--valid', 'notvalid--', 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'] - ) + ['not--valid', 'notvalid--', 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'], + ), ); registry.addCase(new RegexTestCase(String.raw`^[A-Za-z0-9._-]{3,128}$`, 'CloudTrail Name', [], [])); @@ -271,8 +271,8 @@ export function getRegexRegistry(): RegexRegistry { String.raw`(^arn:(aws[a-zA-Z-]*)?:cloudtrail:[a-z0-9-]+:\d{12}:trail\/(?![-_.])(?!.*[-_.]{2})(?!.*[-_.]$)(?!^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$)[-\w.]{3,128}$)|(^(?![-_.])(?!.*[-_.]{2})(?!.*[-_.]$)(?!^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$)[-\w.]{3,128}$)`, 'CloudTrail Name or ARN', [], - [] - ) + [], + ), ); registry.addCase( @@ -280,8 +280,8 @@ export function getRegexRegistry(): RegexRegistry { String.raw`^arn:(?:aws|aws-cn|aws-us-gov):cloudtrail:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:trail/[A-Za-z0-9._-]{3,128}$`, 'CloudTrail ARN', [], - [] - ) + [], + ), ); registry.addCase(new RegexTestCase(String.raw`^[a-zA-Z0-9-_./]{1,512}$`, 'CloudTrail Log Group Name', [], [])); @@ -290,7 +290,7 @@ export function getRegexRegistry(): RegexRegistry { String.raw`^.{1,255}$`, 'AutoScaling Group Name', ['my-group'], - ['', 'a'.repeat(256)] + ['', 'a'.repeat(256)], ); registry.addCase(autoScalingGroupNameTestCase); @@ -303,8 +303,8 @@ export function getRegexRegistry(): RegexRegistry { 
'arn:aws:sns:us-east-1:111122223333:SomeTopicName', 'arn:aws-us-gov:sns:us-gov-east-1:111122223333:SomeTopicName', ], - [] - ) + [], + ), ); registry.addCase(new RegexTestCase(String.raw`^(?:[1-9]\d{0,3}|10000)$`, 'Integer, [1, 10000]', ['90'], [])); @@ -315,8 +315,8 @@ export function getRegexRegistry(): RegexRegistry { String.raw`^(\b([0-9]|[1-8][0-9]|9[0-9]|[1-8][0-9]{2}|9[0-8][0-9]|99[0-9]|[1-8][0-9]{3}|9[0-8][0-9]{2}|99[0-8][0-9]|999[0-9]|10000)\b)$`, 'Integer, [1, 10000]', [], - [] - ) + [], + ), ); registry.addCase(new RegexTestCase(String.raw`^[0-9]\d*$`, 'Integer', [], [])); @@ -335,12 +335,12 @@ export function getRegexRegistry(): RegexRegistry { 'still--invalid', 'again-invalid-6-', 'too-long-cluster-name-too-long-too-long-cluster-name-too-long-23', - ] - ) + ], + ), ); registry.addCase( - new RegexTestCase(String.raw`^[^"'\\ ]{0,512}$`, 'The prefix applied to the log file names.', [], []) + new RegexTestCase(String.raw`^[^"'\\ ]{0,512}$`, 'The prefix applied to the log file names.', [], []), ); registry.addCase( @@ -351,16 +351,21 @@ export function getRegexRegistry(): RegexRegistry { [ 'arn:aws:cloudformation:us-east-1:111111111111:stack/bad_stack_name/00000000-0000-0000-0000-000000000000', 'arn:aws:cloudformation:us-east-1:111111111111:stack/my-stack/bad_uuid', - ] - ) + ], + ), ); registry.addCase( - new RegexTestCase(String.raw`^[a-zA-Z0-9_-]{1,80}(?:\.fifo)?$`, 'SQS Queue name', ['a_real_queue'], ['a.bad.queue']) + new RegexTestCase( + String.raw`^[a-zA-Z0-9_-]{1,80}(?:\.fifo)?$`, + 'SQS Queue name', + ['a_real_queue'], + ['a.bad.queue'], + ), ); registry.addCase( - new RegexTestCase(String.raw`^(?:[0-9]|[1-9][0-9]|100)$`, 'Number 0-100', ['0', '100', '27'], ['011']) + new RegexTestCase(String.raw`^(?:[0-9]|[1-9][0-9]|100)$`, 'Number 0-100', ['0', '100', '27'], ['011']), ); addIamMatchTestCases(registry); @@ -375,6 +380,18 @@ export function getRegexRegistry(): RegexRegistry { addRedshiftMatchTestCases(registry); addSNSMatchTestCases(registry); 
addSQSMatchTestCases(registry); + addEC2InstanceMatchTestCases(registry); + addEC2InstanceIdMatchTestCases(registry); + addECRRepositoryMatchTestCases(registry); + addECRRepositoryARNMatchTestCases(registry); + addCloudFrontMatchTestCases(registry); + addCloudFrontDefaultObjectTestCases(registry); + addCloudFrontMatchTestCasesAlternative(registry); + addCloudFrontDistributionIdTestCases(registry); + addSSMDocumentArnTestCases(registry); + addTransitGatewayIdTestCases(registry); + addTransitGatewayARNTestCases(registry); + addSecretsManagerArnTestCases(registry); return registry; } @@ -394,7 +411,7 @@ function addIamMatchTestCases(registry: RegexRegistry) { 'arn:aws:iam::111111111111:user//SuprisinglyInvalid', 'arn:aws:iam::111111111111:user/AUsernameThatIsJustTooLongPleaseReconsiderCreatingUsernamesThisLong', `arn:aws:iam::111111111111:user/${'_'.repeat(511)}/PathTooLong`, - ] + ], ); iamUserTestCase.addMatchTestCase('arn:aws:iam::111111111111:user/TestUser', ['TestUser']); iamUserTestCase.addMatchTestCase('arn:aws-us-gov:iam::111111111111:user/with/path/user@example.com', [ @@ -412,11 +429,11 @@ function addAutoScalingMatchTestCases(registry: RegexRegistry) { ], [ 'arn:aws:autoscaling:us-east-1:111111111111:autoScalingGroup:00000000-0000-0000-0000-000000000000:autoScalingGroupName/', - ] + ], ); autoScalingGroupNameTestCase.addMatchTestCase( 'arn:aws:autoscaling:us-east-1:111111111111:autoScalingGroup:00000000-0000-0000-0000-000000000000:autoScalingGroupName/my-group', - ['my-group'] + ['my-group'], ); registry.addCase(autoScalingGroupNameTestCase); } @@ -426,7 +443,7 @@ function addCloudTrailMatchTestCases(registry: RegexRegistry) { String.raw`^arn:(?:aws|aws-cn|aws-us-gov):cloudtrail:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:trail/([A-Za-z0-9._-]{3,128})$`, 'CloudTrail ARN, capture name', [], - [] + [], ); registry.addCase(cloudTrailNameTestCase); } @@ -436,7 +453,7 @@ function addCodeBuildMatchTestCases(registry: RegexRegistry) { 
String.raw`^arn:(?:aws|aws-cn|aws-us-gov):codebuild:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:project/([A-Za-z0-9][A-Za-z0-9\-_]{1,254})$`, 'CodeBuild Project ARN, capture project name', [], - [] + [], ); registry.addCase(codeBuildProjectNameTestCase); } @@ -446,7 +463,7 @@ function addEc2MatchTestCases(registry: RegexRegistry) { String.raw`^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:security-group/(sg-[0-9a-f]*)$`, 'Security Group ARN, capture group ID', [], - [] + [], ); registry.addCase(securityGroupIdTestCase); @@ -455,7 +472,7 @@ function addEc2MatchTestCases(registry: RegexRegistry) { String.raw`^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:security-group/(sg-[a-f0-9]{8,17})$`, 'Security Group ARN, capture group ID', [], - [] + [], ); registry.addCase(securityGroupIdTestCaseAlternative); @@ -464,15 +481,23 @@ function addEc2MatchTestCases(registry: RegexRegistry) { String.raw`^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-[0-9]):[0-9]{12}:security-group/(sg-[a-f0-9]{8,17})$`, 'Security Group ARN, capture group ID', [], - [] + [], ); registry.addCase(securityGroupIdTestCaseAlternativeTwo); + const securityGroupIdTestCaseAlternativeThree: RegexMatchTestCase = new RegexMatchTestCase( + String.raw`^sg-[a-z0-9\-]+$`, + 'Security Group ID', + ['sg-02ce123456e7893c'], + ['sg-NOTREAL'], + ); + registry.addCase(securityGroupIdTestCaseAlternativeThree); + const vpcIdTestCase: RegexMatchTestCase = new RegexMatchTestCase( String.raw`^arn:(?:aws|aws-cn|aws-us-gov):ec2:.*:\d{12}:vpc/(vpc-[0-9a-f]{8,17})$`, 'VPC ARN, capture VPC ID', [], - [] + [], ); registry.addCase(vpcIdTestCase); @@ -481,7 +506,7 @@ function addEc2MatchTestCases(registry: RegexRegistry) { String.raw`^arn:(?:aws|aws-cn|aws-us-gov):ec2:.*:\d{12}:vpc/(vpc-[0-9a-f]{8,17}$)`, 'VPC ARN, capture VPC ID', [], - [] + [], ); registry.addCase(vpcIdTestCaseAlternative); @@ -489,7 +514,7 @@ function addEc2MatchTestCases(registry: RegexRegistry) { 
String.raw`^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:subnet\/(subnet-[0-9a-f]*)$`, 'Subnet ARN', ['arn:aws:ec2:us-east-1:111111111111:subnet/subnet-017e22c0195eb5ded'], - ['arn:aws:ec2:us-east-1:111111111111:subnet-017e22c0195eb5ded'] + ['arn:aws:ec2:us-east-1:111111111111:subnet-017e22c0195eb5ded'], ); registry.addCase(subnetIDTestCase); } @@ -499,21 +524,21 @@ function addLambdaMatchTestCases(registry: RegexRegistry) { String.raw`^arn:(?:aws|aws-us-gov|aws-cn):lambda:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:function:([a-zA-Z0-9\-_]{1,64})$`, 'Lambda Function ARN, capture function name', [], - [] + [], ); registry.addCase(lambdaFunctionNameTestCase); } function addRdsMatchTestCases(registry: RegexRegistry) { const manualSnapshotNameTestCase: RegexMatchTestCase = new RegexMatchTestCase( - String.raw`^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:(cluster-snapshot|snapshot):([a-zA-Z](?:[0-9a-zA-Z]+-)*[0-9a-zA-Z]+)$`, + String.raw`^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:(cluster-snapshot|snapshot):([a-zA-Z][0-9a-zA-Z]*(?:-[0-9a-zA-Z]+)*)$`, 'RDS manual Snapshot ARN, capture snapshot name', ['arn:aws:rds:us-east-1:111111111111:snapshot:blah', 'arn:aws:rds:us-east-1:111111111111:cluster-snapshot:blah'], [ 'arn:aws:rds:us-east-1:111111111111:snapshot:0startingNumber', 'arn:aws:rds:us-east-1:111111111111:snapshot:-startingDash', 'arn:aws:rds:us-east-1:111111111111:snapshot:endingDash-', - ] + ], ); registry.addCase(manualSnapshotNameTestCase); @@ -521,15 +546,15 @@ function addRdsMatchTestCases(registry: RegexRegistry) { String.raw`^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:db:((?!.*--.*)(?!.*-$)[a-z][a-z0-9-]{0,62})$`, 'RDS DB ARN, capture DB name', [], - [] + [], ); registry.addCase(dbNameTestCase); const snapshotNameTestCase: RegexMatchTestCase = new RegexMatchTestCase( - 
String.raw`^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:((?:cluster-)?snapshot|dbclustersnapshot):((?:rds:)?((?!.*--.*)(?!.*-$)[a-zA-Z][a-zA-Z0-9-]{0,254}))$`, + String.raw`^arn:(?:aws|aws-cn|aws-us-gov):rds:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:((?:cluster-)?snapshot|dbclustersnapshot):((?:rds:|awsbackup:)?((?!.*--.*)(?!.*-$)[a-zA-Z][a-zA-Z0-9-]{0,254}))$`, 'RDS Snapshot ARN, capture snapshot, snapshot name with prefix, snapshot name without prefix', [], - [] + [], ); registry.addCase(snapshotNameTestCase); } @@ -539,7 +564,7 @@ function addS3MatchTestCases(registry: RegexRegistry) { String.raw`^arn:(?:aws|aws-cn|aws-us-gov):s3:::([a-z0-9.-]{3,63})$`, 'S3 Bucket ARN, capture bucket name', [], - [] + [], ); registry.addCase(bucketNameTestCase); @@ -548,7 +573,7 @@ function addS3MatchTestCases(registry: RegexRegistry) { String.raw`^arn:(?:aws|aws-cn|aws-us-gov):s3:::([A-Za-z0-9.-]{3,63})$`, 'S3 Bucket ARN, capture bucket name', [], - [] + [], ); registry.addCase(bucketNameTestCaseAlternative); } @@ -558,7 +583,7 @@ function addKmsMatchTestCases(registry: RegexRegistry) { String.raw`^arn:(?:aws|aws-cn|aws-us-gov):kms:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:key/([A-Za-z0-9-]{36})$`, 'KMS Key ID, capture ID', [], - [] + [], ); registry.addCase(keyIdTestCase); } @@ -580,7 +605,7 @@ function addRedshiftMatchTestCases(registry: RegexRegistry) { 'arn:aws:us-west-2:111111111111:cluster:still--invalid', 'arn:aws:us-west-2:111111111111:cluster:again-invalid-6-', 'arn:aws:us-west-2:111111111111:cluster:too-long-cluster-name-too-long-too-long-cluster-name-too-long-23', - ] + ], ); clusterNameTestCase.addMatchTestCase('arn:aws-cn:redshift:ap-northeast-1:111111111111:cluster:my-cluster-25', [ 'my-cluster-25', @@ -593,7 +618,7 @@ function addSQSMatchTestCases(registry: RegexRegistry) { String.raw`^arn:(?:aws|aws-us-gov|aws-cn):sqs:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:([a-zA-Z0-9_-]{1,80}(?:\.fifo)?)$`, 'SQS Queue ARN, capture name', 
['arn:aws:sqs:us-east-1:111111111111:valid_queue'], - ['arn:aws-us-gov:sqs:us-gov-west-1:111111111111:invalid-queue.fifa'] + ['arn:aws-us-gov:sqs:us-gov-west-1:111111111111:invalid-queue.fifa'], ); sqsQueueNameTestCase.addMatchTestCase('arn:aws:sqs:us-east-1:111111111111:valid_queue', ['valid_queue']); registry.addCase(sqsQueueNameTestCase); @@ -604,7 +629,127 @@ function addSNSMatchTestCases(registry: RegexRegistry) { String.raw`^arn:(?:aws|aws-us-gov|aws-cn):sns:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:([a-zA-Z0-9_-]{1,80}(?:\.fifo)?)$`, 'SNS Topic ARN', ['arn:aws:sns:us-east-1:111111111111:TestTopic', 'arn:aws:sns:us-east-1:111111111111:TestTopic.fifo'], - ['arn:aws:sns:us-east-1:111111111111:TestTopic.fifa'] + ['arn:aws:sns:us-east-1:111111111111:TestTopic.fifa'], + ); + registry.addCase(keyIdTestCase); +} + +function addEC2InstanceMatchTestCases(registry: RegexRegistry) { + const keyIdTestCase: RegexMatchTestCase = new RegexMatchTestCase( + String.raw`^arn:(?:aws|aws-cn|aws-us-gov):ec2:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:instance\/(i-[0-9a-f]*)$`, + 'EC2 Instance ARN', + ['arn:aws:ec2:us-east-1:111111111111:instance/i-077c4d5f32561ac45'], + ['arn:aws:ec2:us-east-1:111111111111:instance/instance-notReal'], + ); + registry.addCase(keyIdTestCase); +} + +function addEC2InstanceIdMatchTestCases(registry: RegexRegistry) { + const keyIdTestCase: RegexMatchTestCase = new RegexMatchTestCase( + String.raw`^i-[a-f0-9]{8}(?:[a-f0-9]{9})?$`, + 'EC2 Instance Id', + ['i-077c4d5f32561ac45'], + ['instance-notReal'], + ); + registry.addCase(keyIdTestCase); +} + +function addECRRepositoryARNMatchTestCases(registry: RegexRegistry) { + const keyIdTestCase: RegexMatchTestCase = new RegexMatchTestCase( + String.raw`^arn:(?:aws|aws-cn|aws-us-gov):ecr:[a-z]{2}-[a-z]+-\d{1}:\d{12}:repository\/([a-z0-9._\/\-]+)$`, + 'ECR Repository ARN', + ['arn:aws:ecr:us-east-1:111111111111:repository/test'], + ['arn:aws:ecr:us-east-1:111111111111:fakerepository/NOTREAL'], + ); + 
registry.addCase(keyIdTestCase); +} + +function addECRRepositoryMatchTestCases(registry: RegexRegistry) { + const keyIdTestCase: RegexMatchTestCase = new RegexMatchTestCase( + String.raw`([a-z0-9._\/\-]+)$`, + 'ECR Repository Name', + ['test-repository'], + ['NOTREAL'], + ); + registry.addCase(keyIdTestCase); +} + +function addCloudFrontMatchTestCases(registry: RegexRegistry) { + const keyIdTestCase: RegexMatchTestCase = new RegexMatchTestCase( + String.raw`^(arn:(?:aws|aws-us-gov|aws-cn):cloudfront::\d{12}:distribution\/([A-Z0-9]+))$`, + 'CloudFront Distribution ARN', + ['arn:aws:cloudfront::111111111111:distribution/D531CA4V1232D8'], + ['arn:aws:cloudfront::111111111111:fakedistribution/NOTREAL'], + ); + registry.addCase(keyIdTestCase); +} + +function addCloudFrontMatchTestCasesAlternative(registry: RegexRegistry) { + const keyIdTestCase: RegexMatchTestCase = new RegexMatchTestCase( + String.raw`^arn:(?:aws|aws-cn|aws-us-gov):cloudfront::[0-9]{12}:distribution\/([A-Z0-9]*)$`, + 'CloudFront Distribution ARN', + ['arn:aws:cloudfront::111111111111:distribution/EDFDVB6EXAMPLE'], + ['arn:aws:cloudfront::111111111111:fakedistribution/NOTREAL'], + ); + registry.addCase(keyIdTestCase); +} + +function addCloudFrontDistributionIdTestCases(registry: RegexRegistry) { + const keyIdTestCase: RegexMatchTestCase = new RegexMatchTestCase( + String.raw`^[A-Za-z0-9]*$`, + 'CloudFront Distribution ID', + ['EDFDVB6EXAMPLE'], + ['NOT-REAL'], + ); + registry.addCase(keyIdTestCase); +} + +function addCloudFrontDefaultObjectTestCases(registry: RegexRegistry) { + const keyIdTestCase: RegexMatchTestCase = new RegexMatchTestCase( + String.raw`^[\w._-~]{1,255}$`, + 'DefaultRootObject', + ['index.html'], + [], + ); + registry.addCase(keyIdTestCase); +} + +function addSSMDocumentArnTestCases(registry: RegexRegistry) { + const keyIdTestCase: RegexMatchTestCase = new RegexMatchTestCase( + 
String.raw`^(arn:(?:aws|aws-cn|aws-us-gov):ssm:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:document\/[A-Za-z0-9][A-Za-z0-9\-_]{1,254})$`, + 'SSM Document ARN', + ['arn:aws:ssm:us-east-1:111111111111:document/test'], + ['arn:aws:ssm:us-east-1:111111111111:notDocument/test'], + ); + registry.addCase(keyIdTestCase); +} + +function addTransitGatewayIdTestCases(registry: RegexRegistry) { + const keyIdTestCase: RegexMatchTestCase = new RegexMatchTestCase( + String.raw`^tgw-[a-z0-9\-]+$`, + 'Transit Gateway ID', + ['tgw-0111d111d1d1a111b'], + ['tgw-NOTATRANSITGATEWAY'], + ); + registry.addCase(keyIdTestCase); +} + +function addTransitGatewayARNTestCases(registry: RegexRegistry) { + const keyIdTestCase: RegexMatchTestCase = new RegexMatchTestCase( + String.raw`^arn:(?:aws|aws-cn|aws-us-gov):ec2:[a-z]{2}-[a-z]+-\d{1}:\d{12}:transit-gateway\/(tgw-[a-z0-9\-]+)$`, + 'Transit Gateway ARN', + ['arn:aws:ec2:us-east-1:111111111111:transit-gateway/tgw-0111d111d1d1a111b'], + ['arn:aws:ec2:us-east-1:111111111111:not-transit-gateway/tgw-0111d111d1d1a111b'], + ); + registry.addCase(keyIdTestCase); +} + +function addSecretsManagerArnTestCases(registry: RegexRegistry) { + const keyIdTestCase: RegexMatchTestCase = new RegexMatchTestCase( + String.raw`^arn:(?:aws|aws-cn|aws-us-gov):secretsmanager:(?:[a-z]{2}(?:-gov)?-[a-z]+-\d):\d{12}:secret:([A-Za-z0-9\/_+=.@-]+)$`, + 'Secrets Manager Secret ARN', + ['arn:aws:secretsmanager:us-east-1:111111111111:secret:test'], + ['arn:aws:secretsmanager:us-east-1:111111111111:not-secret:test'], ); registry.addCase(keyIdTestCase); } diff --git a/source/test/runbook_validator.test.ts b/source/test/runbook_validator.test.ts index b20cd074..12da517e 100644 --- a/source/test/runbook_validator.test.ts +++ b/source/test/runbook_validator.test.ts @@ -350,7 +350,7 @@ test.skip.each(remediationRunbooks)( (runbook: RunbookTestHelper) => { const description: string = runbook.getObject().description; expect(desriptionDocumentsSecurityStandards(description)).toBe(true); - 
} + }, ); function isAssumeRoleParameter(value: string): boolean { @@ -361,11 +361,11 @@ test.each(runbooks)('%s takes AssumeRole as parameter', (runbook: RunbookTestHel expect(isAssumeRoleParameter(runbook.getObject().assumeRole)).toBe(true); expect(runbook.getObject().parameters.AutomationAssumeRole.type).toStrictEqual('String'); expect(runbook.getObject().parameters.AutomationAssumeRole.description).toStrictEqual( - '(Required) The ARN of the role that allows Automation to perform the actions on your behalf.' + '(Required) The ARN of the role that allows Automation to perform the actions on your behalf.', ); expect(runbook.getObject().parameters.AutomationAssumeRole).not.toHaveProperty('default'); expect(runbook.getObject().parameters.AutomationAssumeRole.allowedPattern).toStrictEqual( - regexRegistry.getRegexForAutomationAssumeRole() + regexRegistry.getRegexForAutomationAssumeRole(), ); }); @@ -380,7 +380,7 @@ test.skip.each(remediationRunbooks)('%s has outputs', (runbook: RunbookTestHelpe test.each(controlRunbooks)('%s takes finding as parameter', (runbook: RunbookTestHelper) => { expect(runbook.getObject().parameters.Finding.type).toStrictEqual('StringMap'); expect(runbook.getObject().parameters.Finding.description).toStrictEqual( - `The input from the Orchestrator Step function for the ${runbook.getControlName()} finding` + `The input from the Orchestrator Step function for the ${runbook.getControlName()} finding`, ); }); diff --git a/source/test/test_data/tstest-runbook.yaml b/source/test/test_data/tstest-runbook.yaml index 5a9ee94f..6c803196 100644 --- a/source/test/test_data/tstest-runbook.yaml +++ b/source/test/test_data/tstest-runbook.yaml @@ -11,9 +11,9 @@ description: | * KMSKeyArn (from SSM): Arn of the KMS key to be used to encrypt data ## Security Standards / Controls - * AFSBP v1.0.0: N/A - * CIS v1.2.0: 2.4 - * PCI: CloudTrail.4 + * AWS FSBP v1.0.0: N/A + * CIS v1.2.0: 2.4 + * PCI: CloudTrail.4 schemaVersion: "0.3" assumeRole: "{{ 
AutomationAssumeRole }}" diff --git a/tox.ini b/tox.ini new file mode 100644 index 00000000..063a47a9 --- /dev/null +++ b/tox.ini @@ -0,0 +1,40 @@ +; Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +; SPDX-License-Identifier: Apache-2.0 +[tox] +min_version=4.0 +env_list=format, lint + +[testenv:format] +skip_install=true +deps= + isort + black +commands= + isort --profile black --check . + black --check . + +[flake8] +exclude= + .tox + .venv + node_modules +extend-ignore= + # line length, handled by black + E501, + # whitespace, handled by black + E203, + # TODO: invalid escape sequence '\d' + W605, + +[testenv:lint] +deps=-r ./deployment/requirements_dev.txt +commands= + mypy ./simtest + mypy ./source/layer + mypy ./source/Orchestrator + mypy ./source/playbooks/AFSBP/ssmdocs/scripts + mypy ./source/playbooks/common + mypy ./source/playbooks/PCI321/ssmdocs/scripts + mypy ./source/playbooks/SC/ssmdocs/scripts + mypy ./source/solution_deploy/source + flake8 .