name: detection-testing
on:
push:
pull_request:
types: [opened, reopened]
schedule:
- cron: "44 4 * * *"
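# Nightly scheduled run at 04:44 (cron times are UTC)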
jobs:
validate-tag-if-present:
runs-on: ubuntu-latest
steps:
- name: Validate that the tag (if present) is in the correct format
run: |
echo "The GITHUB_REF: $GITHUB_REF"
#First, check whether this ref is a tag
if [[ $GITHUB_REF =~ ^refs/tags/ ]]; then
#Yes, this is a tag, so test that the tag is in the correct
#format (for example, v1.10.20)
if [[ $GITHUB_REF =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
echo "PASS: Tagged release with good format"
exit 0
else
echo "FAIL: Tagged release with bad format"
exit 1
fi
else
echo "PASS: Not a tagged release"
exit 0
fi
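# Illustrative examples of how the check above behaves:
#   refs/tags/v1.10.20  -> PASS (tagged release with a good format)
#   refs/tags/1.10.20   -> FAIL (tagged release missing the leading "v")
#   refs/heads/develop  -> PASS (not a tagged release)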
quit-for-dependabot:
runs-on: ubuntu-latest
if: github.actor != 'dependabot[bot]' && github.actor != 'dependabot-preview[bot]'
steps:
- name: "Placeholder"
run: |
echo "yes it ran"
docker-detection-testing-setup:
runs-on: ubuntu-latest
if: "!contains(github.ref, 'refs/tags/')" #don't run on tags - future steps won't run either since they depend on this job
needs: [validate-tag-if-present, quit-for-dependabot]
steps:
- name: Get branch and PR required for detection testing
id: vars
run: |
echo "::set-output name=branch::${GITHUB_REF#refs/heads/}"
- name: Checkout Repo
uses: actions/checkout@v2
#with:
# ref: develop
- uses: actions/setup-python@v2
with:
python-version: '3.9' #Available versions: https://github.com/actions/python-versions/releases - easy to change, run in a matrix, or swap for pypy
architecture: 'x64' # optional, x64 or x86; defaults to x64 if not specified
cache: 'pip'
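# cache: 'pip' caches pip's package cache between runs, keyed by default on the hash of the repository's requirements files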
- name: Install Python Dependencies
run: |
python -m venv .venv
source .venv/bin/activate
python -m pip install wheel
python -m pip install -r requirements.txt
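# Each run step starts a fresh shell, so later steps must re-activate the venv with "source .venv/bin/activate"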
- name: Run the CI
run: |
source .venv/bin/activate
cd bin/docker_detection_tester
echo "github.event.issue.pull_request : [${{ github.event.issue.pull_request }}]"
echo "github.event.pull_request.number : [${{ github.event.pull_request.number }}]"
echo "steps.vars.outputs.branch : [${{ steps.vars.outputs.branch }}]"
echo "github.event.pull_request.head.ref : [${{ github.event.pull_request.head.ref }}]"
echo "github.event_name : [${{ github.event_name }}]"
if [[ "${{ github.event_name }}" == "schedule" ]]; then
# Note that scheduled workflows ONLY run on the default branch, so this path only executes there
echo "Scheduled (nightly) run: testing all detections on the develop branch"
python detection_testing_execution.py run --branch develop --mode all --mock --config_file test_config_github_actions.json
elif [[ ! -z "${{ github.event.pull_request.head.ref }}" && ! -z "${{ github.event.pull_request.number }}" ]]; then
echo "Pull request from source branch [${{ github.event.pull_request.head.ref }}] for PR number [${{ github.event.issue.number }}]"
python detection_testing_execution.py run --branch ${{ github.event.pull_request.head.ref }} --pr_number ${{ github.event.pull_request.number }} --mode changes --mock --config_file test_config_github_actions.json
else
echo "Push from branch [${{ steps.vars.outputs.branch }}]"
python detection_testing_execution.py run --branch ${{ steps.vars.outputs.branch }} --mode changes --mock --config_file test_config_github_actions.json
fi
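# Rename the generated *-test-run.json to a stable name; replicate_test.json is uploaded later
# so the same test set can be re-run at a different time or place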
mv *-test-run.json replicate_test.json
- name: Upload Test Results Files
uses: actions/upload-artifact@v2
with:
name: testing-results-config
path: |
bin/docker_detection_tester/prior_config/apps/DA-ESS-ContentUpdate-latest.tar.gz
bin/docker_detection_tester/prior_config/config_tests_0.json
bin/docker_detection_tester/prior_config/config_tests_1.json
bin/docker_detection_tester/prior_config/config_tests_2.json
bin/docker_detection_tester/prior_config/config_tests_3.json
bin/docker_detection_tester/prior_config/config_tests_4.json
bin/docker_detection_tester/prior_config/config_tests_5.json
bin/docker_detection_tester/prior_config/config_tests_6.json
bin/docker_detection_tester/prior_config/config_tests_7.json
bin/docker_detection_tester/prior_config/config_tests_8.json
bin/docker_detection_tester/prior_config/config_tests_9.json
- name: Upload File to Enable Replication of the Test at a Different Time or Place
uses: actions/upload-artifact@v2
with:
name: replicate_test
path: |
bin/docker_detection_tester/replicate_test.json
docker-detection-testing-execution:
runs-on: ubuntu-latest
if: "!contains(github.ref, 'refs/tags/')" #don't run on tags - future steps won't run either since they depend on this job
needs: [docker-detection-testing-setup]
strategy:
matrix:
manifest_filename: ["config_tests_0.json",
"config_tests_1.json",
"config_tests_2.json",
"config_tests_3.json",
"config_tests_4.json",
"config_tests_5.json",
"config_tests_6.json",
"config_tests_7.json",
"config_tests_8.json",
"config_tests_9.json"]
steps:
- name: Get branch and PR required for detection testing
id: vars
run: |
echo "::set-output name=branch::${GITHUB_REF#refs/heads/}"
- name: Checkout Repo
uses: actions/checkout@v2
#with:
# ref: develop
- name: Download artifacts
uses: actions/download-artifact@v2
with:
name: testing-results-config
path: bin/docker_detection_tester/prior_config
- uses: actions/setup-python@v2
with:
python-version: '3.9' #Available versions: https://github.com/actions/python-versions/releases - easy to change, run in a matrix, or swap for pypy
architecture: 'x64' # optional, x64 or x86; defaults to x64 if not specified
cache: 'pip'
- name: Install Python Dependencies
run: |
python -m venv .venv
source .venv/bin/activate
python -m pip install wheel
python -m pip install -r requirements.txt
- name: Run the CI
run: |
source .venv/bin/activate
cd bin/docker_detection_tester
python detection_testing_execution.py run -c prior_config/${{ matrix.manifest_filename }}
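# The results for this shard are uploaded below as "<manifest_filename>.results" and merged by the merge-results job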
- name: Upload Test Results Files
uses: actions/upload-artifact@v2
with:
name: ${{ matrix.manifest_filename }}.results
path: |
bin/docker_detection_tester/test_results/success.csv
bin/docker_detection_tester/test_results/error.csv
bin/docker_detection_tester/test_results/failure.csv
bin/docker_detection_tester/test_results/combined.csv
bin/docker_detection_tester/test_results/success.json
bin/docker_detection_tester/test_results/error.json
bin/docker_detection_tester/test_results/failure.json
bin/docker_detection_tester/test_results/combined.json
bin/docker_detection_tester/test_results/summary.json
docker-detection-testing-execution-merge-results:
runs-on: ubuntu-latest
if: "!contains(github.ref, 'refs/tags/')" #don't run on tags - future steps won't run either since they depend on this job
needs: [docker-detection-testing-setup, docker-detection-testing-execution]
steps:
- name: Get branch and PR required for detection testing
id: vars
run: |
echo "::set-output name=branch::${GITHUB_REF#refs/heads/}"
- name: Checkout Repo
uses: actions/checkout@v2
#with:
# ref: develop
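# Fan-in: download each matrix shard's results artifact into results_0 ... results_9 so the merge step can glob results_*/summary.json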
- name: Download artifacts
uses: actions/download-artifact@v2
with:
name: config_tests_0.json.results
path: bin/docker_detection_tester/results_0
- name: Download artifacts
uses: actions/download-artifact@v2
with:
name: config_tests_1.json.results
path: bin/docker_detection_tester/results_1
- name: Download artifacts
uses: actions/download-artifact@v2
with:
name: config_tests_2.json.results
path: bin/docker_detection_tester/results_2
- name: Download artifacts
uses: actions/download-artifact@v2
with:
name: config_tests_3.json.results
path: bin/docker_detection_tester/results_3
- name: Download artifacts
uses: actions/download-artifact@v2
with:
name: config_tests_4.json.results
path: bin/docker_detection_tester/results_4
- name: Download artifacts
uses: actions/download-artifact@v2
with:
name: config_tests_5.json.results
path: bin/docker_detection_tester/results_5
- name: Download artifacts
uses: actions/download-artifact@v2
with:
name: config_tests_6.json.results
path: bin/docker_detection_tester/results_6
- name: Download artifacts
uses: actions/download-artifact@v2
with:
name: config_tests_7.json.results
path: bin/docker_detection_tester/results_7
- name: Download artifacts
uses: actions/download-artifact@v2
with:
name: config_tests_8.json.results
path: bin/docker_detection_tester/results_8
- name: Download artifacts
uses: actions/download-artifact@v2
with:
name: config_tests_9.json.results
path: bin/docker_detection_tester/results_9
- uses: actions/setup-python@v2
with:
python-version: '3.9' #Available versions: https://github.com/actions/python-versions/releases - easy to change, run in a matrix, or swap for pypy
architecture: 'x64' # optional, x64 or x86; defaults to x64 if not specified
cache: 'pip'
- name: Install Python Dependencies
run: |
python -m venv .venv
source .venv/bin/activate
python -m pip install wheel
python -m pip install -r requirements.txt
- name: Merge Detection Results into a Single File
run: |
source .venv/bin/activate
cd bin/docker_detection_tester
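# Combine each shard's summary.json (downloaded above into results_0 ... results_9) into a single summary_test_results.json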
python summarize_json.py --files results_*/summary.json --output_filename summary_test_results.json
- name: Upload Summary Test Results JSON
uses: actions/upload-artifact@v2
if: always()
with:
name: SummaryTestResults
path: |
bin/docker_detection_tester/summary_test_results.json
- name: Upload Failures Manifest on Failure
uses: actions/upload-artifact@v2
if: failure()
with:
name: DetectionFailureManifest
path: |
bin/docker_detection_tester/detection_failure_manifest.json
#Always clean these up; they clutter the workflow's artifact output
- name: Clean up intermediate Files
uses: geekyeggo/delete-artifact@v1
if: always()
with:
name: |
config_tests_0.json.results
config_tests_1.json.results
config_tests_2.json.results
config_tests_3.json.results
config_tests_4.json.results
config_tests_5.json.results
config_tests_6.json.results
config_tests_7.json.results
config_tests_8.json.results
config_tests_9.json.results
- name: Log in to S3 for Artifact Uploads
if: ${{ github.event_name == 'schedule' }}
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-west-2
- name: Upload S3 Badge and Summary Artifacts for Nightly Scheduled Run
if: ${{ github.event_name == 'schedule' }}
run: |
cd bin/docker_detection_tester
python generate_detection_coverage_badge.py --input_summary_file summary_test_results.json --output_badge_file detection_coverage.svg --badge_string "Pass Rate"
#Upload artifact (summary test results)
aws s3 cp summary_test_results.json s3://security-content/reporting/summary_test_results.json
#These reside in a public bucket, so there is no need to explicitly mark the objects as public.
#If the bucket default ever changes, the ACL can be set explicitly:
#aws s3api put-object-acl --bucket security-content --key reporting/summary_test_results.json --acl public-read
#Upload artifact (test results coverage badge)
aws s3 cp detection_coverage.svg s3://security-content/reporting/detection_coverage.svg
#As above, no explicit public-read ACL is needed:
#aws s3api put-object-acl --bucket security-content --key reporting/detection_coverage.svg --acl public-read