diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml new file mode 100644 index 000000000..2d3d7060f --- /dev/null +++ b/.github/workflows/backport.yml @@ -0,0 +1,29 @@ + +name: Backport +on: + pull_request_target: + types: + - closed + - labeled + +jobs: + backport: + runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write + name: Backport + steps: + - name: GitHub App token + id: github_app_token + uses: tibdex/github-app-token@v1.5.0 + with: + app_id: ${{ secrets.APP_ID }} + private_key: ${{ secrets.APP_PRIVATE_KEY }} + installation_id: 22958780 + + - name: Backport + uses: VachaShah/backport@v1.1.4 + with: + github_token: ${{ steps.github_app_token.outputs.token }} + branch_name: backport/backport-${{ github.event.number }} \ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index efcc81de6..c2ab4d407 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -18,11 +18,11 @@ jobs: os: [ ubuntu-latest, windows-latest, macos-latest ] include: - os: windows-latest - os_build_args: -x integTest -x jacocoTestReport + os_build_args: -x jacocoTestReport working_directory: X:\ os_java_options: -Xmx4096M - os: macos-latest - os_build_args: -x integTest -x jacocoTestReport + os_build_args: -x jacocoTestReport name: Build and Test security-analytics with JDK ${{ matrix.java }} on ${{ matrix.os }} runs-on: ${{ matrix.os }} @@ -59,6 +59,27 @@ jobs: with: token: ${{ secrets.CODECOV_TOKEN }} + - name: Upload failed logs + uses: actions/upload-artifact@v2 + if: ${{ failure() && matrix.os == 'ubuntu-latest' }} + with: + name: logs-ubuntu + path: build/testclusters/integTest-*/logs/* + + - name: Upload failed logs + uses: actions/upload-artifact@v2 + if: ${{ failure() && matrix.os == 'macos-latest' }} + with: + name: logs-mac + path: build/testclusters/integTest-*/logs/* + + - name: Upload failed logs + uses: actions/upload-artifact@v2 + if: ${{ failure() && matrix.os == 'windows-latest' }} + with: + name: logs-windows + path: build\testclusters\integTest-*\logs\* + - name: Upload Artifacts uses: actions/upload-artifact@v1 with: diff --git a/.github/workflows/delete_backport_branch.yml b/.github/workflows/delete_backport_branch.yml new file mode 100644 index 000000000..39b5b3424 --- /dev/null +++ b/.github/workflows/delete_backport_branch.yml @@ -0,0 +1,20 @@ +## +# Copyright OpenSearch Contributors +# SPDX-License-Identifier: Apache-2.0 +## + +name: Delete merged branch of the backport PRs +on: + pull_request: + types: + - closed + +jobs: + delete-branch: + runs-on: ubuntu-latest + if: startsWith(github.event.pull_request.head.ref,'backport/') + steps: + - name: Delete merged branch + uses: SvanBoxel/delete-merged-branch@main + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/maven-publish.yml b/.github/workflows/maven-publish.yml new file mode 100644 index 000000000..609ef7b82 --- /dev/null +++ b/.github/workflows/maven-publish.yml @@ -0,0 +1,39 @@ +name: Publish snapshots to maven + +on: + workflow_dispatch: + push: + branches: [ + main + 1.* + 2.* + ] + +jobs: + build-and-publish-snapshots: + strategy: + fail-fast: false + if: github.repository == 'opensearch-project/security-analytics' + runs-on: ubuntu-latest + + permissions: + id-token: write + contents: write + + steps: + - uses: actions/setup-java@v3 + with: + distribution: temurin # Temurin is a distribution of adoptium + java-version: 17 + - uses: actions/checkout@v3 + - uses: aws-actions/configure-aws-credentials@v1 + with: + role-to-assume: ${{ secrets.PUBLISH_SNAPSHOTS_ROLE }} + aws-region: us-east-1 + - name: publish snapshots to maven + run: | + export SONATYPE_USERNAME=$(aws secretsmanager get-secret-value --secret-id maven-snapshots-username --query SecretString --output text) + export SONATYPE_PASSWORD=$(aws secretsmanager get-secret-value --secret-id maven-snapshots-password --query SecretString --output text) + echo "::add-mask::$SONATYPE_USERNAME" + echo "::add-mask::$SONATYPE_PASSWORD" + ./gradlew publishPluginZipPublicationToSnapshotsRepository diff --git a/.github/workflows/multi-node-test-workflow.yml b/.github/workflows/multi-node-test-workflow.yml index 1cab69173..d7c4a1a0c 100644 --- a/.github/workflows/multi-node-test-workflow.yml +++ b/.github/workflows/multi-node-test-workflow.yml @@ -28,3 +28,9 @@ jobs: uses: actions/checkout@v2 - name: Run integration tests with multi node config run: ./gradlew integTest -PnumNodes=3 + - name: Upload failed logs + uses: actions/upload-artifact@v2 + if: failure() + with: + name: logs + path: build/testclusters/integTest-*/logs/* \ No newline at end of file diff --git a/.github/workflows/security-test-workflow.yml b/.github/workflows/security-test-workflow.yml new file mode 100644 index 000000000..f600314e2 --- /dev/null +++ b/.github/workflows/security-test-workflow.yml @@ -0,0 +1,88 @@ +name: Security Test Workflow +# This workflow is triggered on pull requests and pushes to main or an OpenSearch release branch +on: + pull_request: + branches: + - "*" + push: + branches: + - "*" + +jobs: + build: + strategy: + matrix: + java: [ 11, 17 ] + # Job name + name: Build and test SecurityAnalytics + # This job runs on Linux + runs-on: ubuntu-latest + steps: + # This step uses the setup-java Github action: https://github.com/actions/setup-java + - name: Set Up JDK ${{ matrix.java }} + uses: actions/setup-java@v1 + with: + java-version: ${{ matrix.java }} + # This step uses the checkout Github action: https://github.com/actions/checkout + - name: Checkout Branch + uses: actions/checkout@v2 + # This step uses the setup-java Github action: https://github.com/actions/setup-java + - name: Set Up JDK ${{ matrix.java }} + uses: actions/setup-java@v1 + with: + java-version: ${{ matrix.java }} + - name: Build SecurityAnalytics + # Only assembling since the full build is governed by other workflows + run: ./gradlew assemble + + - name: Pull and Run Docker + run: | + plugin=`basename $(ls build/distributions/*.zip)` + list_of_files=`ls` + list_of_all_files=`ls build/distributions/` + version=`echo $plugin|awk -F- '{print $4}'| cut -d. -f 1-3` + plugin_version=`echo $plugin|awk -F- '{print $4}'| cut -d. -f 1-4` + qualifier=`echo $plugin|awk -F- '{print $4}'| cut -d. -f 1-1` + candidate_version=`echo $plugin|awk -F- '{print $5}'| cut -d. -f 1-1` + docker_version=$version + + [[ -z $candidate_version ]] && candidate_version=$qualifier && qualifier="" + + echo plugin version plugin_version qualifier candidate_version docker_version + echo "($plugin) ($version) ($plugin_version) ($qualifier) ($candidate_version) ($docker_version)" + echo $ls $list_of_all_files + + if docker pull opensearchstaging/opensearch:$docker_version + then + echo "FROM opensearchstaging/opensearch:$docker_version" >> Dockerfile + echo "RUN if [ -d /usr/share/opensearch/plugins/opensearch-security-analytics ]; then /usr/share/opensearch/bin/opensearch-plugin remove opensearch-security-analytics; fi" >> Dockerfile + echo "ADD build/distributions/$plugin /tmp/" >> Dockerfile + echo "RUN /usr/share/opensearch/bin/opensearch-plugin install --batch file:/tmp/$plugin" >> Dockerfile + + docker build -t opensearch-security-analytics:test . + echo "imagePresent=true" >> $GITHUB_ENV + else + echo "imagePresent=false" >> $GITHUB_ENV + fi + + - name: Run Docker Image + if: env.imagePresent == 'true' + run: | + cd .. + docker run -p 9200:9200 -d -p 9600:9600 -e "discovery.type=single-node" opensearch-security-analytics:test + sleep 120 + + - name: Run SecurityAnalytics Test for security enabled test cases + if: env.imagePresent == 'true' + run: | + cluster_running=`curl -XGET https://localhost:9200/_cat/plugins -u admin:admin --insecure` + echo $cluster_running + security=`curl -XGET https://localhost:9200/_cat/plugins -u admin:admin --insecure |grep opensearch-security|wc -l` + echo $security + if [ $security -gt 0 ] + then + echo "Security plugin is available" + ./gradlew :integTest -Dtests.rest.cluster=localhost:9200 -Dtests.cluster=localhost:9200 -Dtests.clustername=docker-cluster -Dhttps=true -Duser=admin -Dpassword=admin + else + echo "Security plugin is NOT available skipping this run as tests without security have already been run" + fi diff --git a/CODEOWNERS b/CODEOWNERS new file mode 100644 index 000000000..3bf1986a0 --- /dev/null +++ b/CODEOWNERS @@ -0,0 +1,7 @@ +@amsiglan +@awshurneyt +@getsaurabh02 +@lezzago +@praveensameneni +@sbcd90 +@eirsep diff --git a/MAINTAINERS.md b/MAINTAINERS.md index cf29f9762..a8735dcce 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -1,9 +1,15 @@ -## Maintainers -| Maintainer | GitHub ID | Affiliation | -|------------------------|-------------------------------------------------| ----------- | -| Saurabh Singh | [getsaurabh02](https://github.com/getsaurabh02) | Amazon | -| Subhobrata Dey | [sbcd90](https://github.com/sbcd90) | Amazon | -| Surya Sashank Nistalai | [eirsep](https://github.com/eirsep) | Amazon | +## Overview +This document contains a list of maintainers in this repo. See [opensearch-project/.github/RESPONSIBILITIES.md](https://github.com/opensearch-project/.github/blob/main/RESPONSIBILITIES.md#maintainer-responsibilities) that explains what the role of maintainer means, what maintainers do in this and other repos, and how they should be doing it. If you're interested in contributing, and becoming a maintainer, see [CONTRIBUTING](CONTRIBUTING.md). -[This document](https://github.com/opensearch-project/.github/blob/main/MAINTAINERS.md) explains what maintainers do in this repo, and how they should be doing it. If you're interested in contributing, see [CONTRIBUTING](CONTRIBUTING.md). \ No newline at end of file +## Current Maintainers + +| Maintainer | GitHub ID | Affiliation | +| ---------------- | ----------------------------------------------------- | ----------- | +| Ashish Agrawal | [lezzago](https://github.com/lezzago) | Amazon | +| Subhobrata Dey | [sbcd90](https://github.com/sbcd90) | Amazon | +| Thomas Hurney | [awshurneyt](https://github.com/AWSHurneyt) | Amazon | +| Surya Sashank Nistala | [eirsep](https://github.com/eirsep) | Amazon | +| Praveen Sameneni | [praveensameneni](https://github.com/praveensameneni) | Amazon | +| Amardeepsingh Siglani | [amsiglan](https://github.com/amsiglan) | Amazon | +| Saurabh Singh | [getsaurabh02](https://github.com/getsaurabh02) | Amazon | diff --git a/build.gradle b/build.gradle index 109244ac0..238ae3f94 100644 --- a/build.gradle +++ b/build.gradle @@ -6,7 +6,7 @@ import org.opensearch.gradle.test.RestIntegTestTask buildscript { ext { - opensearch_version = System.getProperty("opensearch.version", "2.4.0-SNAPSHOT") + opensearch_version = System.getProperty("opensearch.version", "2.7.1-SNAPSHOT") isSnapshot = "true" == System.getProperty("build.snapshot", "true") buildVersionQualifier = System.getProperty("build.version_qualifier", "") version_tokens = opensearch_version.tokenize('-') @@ -53,7 +53,7 @@ ext { licenseHeaders.enabled = true testingConventions.enabled = false -forbiddenApis.ignoreFailures = false +forbiddenApis.ignoreFailures = true dependencyLicenses.enabled = false thirdPartyAudit.enabled = false @@ -98,6 +98,7 @@ publishing { publications { pluginZip(MavenPublication) { publication -> pom { + groupId = "org.opensearch.plugin" name = "opensearch-security-analytics" description = "OpenSearch Security Analytics plugin" licenses { @@ -115,6 +116,17 @@ publishing { } } } + + repositories { + maven { + name = "Snapshots" + url = "https://aws.oss.sonatype.org/content/repositories/snapshots" + credentials { + username "$System.env.SONATYPE_USERNAME" + password "$System.env.SONATYPE_PASSWORD" + } + } + } } repositories { @@ -122,17 +134,28 @@ repositories { mavenCentral() maven { url "https://aws.oss.sonatype.org/content/repositories/snapshots" } } +configurations.all { + resolutionStrategy.cacheChangingModulesFor 0, 'seconds' +} sourceSets.main.java.srcDirs = ['src/main/generated','src/main/java'] +configurations { + zipArchive +} dependencies { javaRestTestImplementation project.sourceSets.main.runtimeClasspath implementation group: 'org.apache.commons', name: 'commons-lang3', version: '3.12.0' implementation "org.antlr:antlr4-runtime:4.10.1" implementation "com.cronutils:cron-utils:9.1.6" - api "org.opensearch:common-utils:${common_utils_version}" + api "org.opensearch:common-utils:${common_utils_version}@jar" api "org.opensearch.client:opensearch-rest-client:${opensearch_version}" implementation "org.jetbrains.kotlin:kotlin-stdlib:${kotlin_version}" + + // Needed for integ tests + zipArchive group: 'org.opensearch.plugin', name:'alerting', version: "${opensearch_build}" + zipArchive group: 'org.opensearch.plugin', name:'opensearch-notifications-core', version: "${opensearch_build}" + zipArchive group: 'org.opensearch.plugin', name:'notifications', version: "${opensearch_build}" } // RPM & Debian build @@ -166,6 +189,21 @@ integTest { systemProperty "https", System.getProperty("https") systemProperty "user", System.getProperty("user") systemProperty "password", System.getProperty("password") + + if (System.getProperty("https") == null || System.getProperty("https") == "false") { + filter { + excludeTestsMatching "org.opensearch.securityanalytics.resthandler.Secure*RestApiIT" + excludeTestsMatching "org.opensearch.securityanalytics.findings.Secure*RestApiIT" + excludeTestsMatching "org.opensearch.securityanalytics.alerts.Secure*RestApiIT" + } + } + + if (System.getProperty("https") != null || System.getProperty("https") == "true") { + filter { + excludeTestsMatching "org.opensearch.securityanalytics.*TransportIT" + } + } + // Tell the test JVM if the cluster JVM is running under a debugger so that tests can use longer timeouts for // requests. The 'doFirst' delays reading the debug setting on the cluster till execution time. doFirst { @@ -193,15 +231,6 @@ integTest.getClusters().forEach{c -> { c.plugin(project.getObjects().fileProperty().value(bundle.getArchiveFile())) }} -String alertingFilePath = "src/test/resources/alerting" -String alertingPlugin = "opensearch-alerting-" + plugin_no_snapshot + ".zip" -String alertingRemoteFile = "https://ci.opensearch.org/ci/dbc/distribution-build-opensearch/" + opensearch_no_snapshot + "/latest/linux/x64/tar/builds/opensearch/plugins/" + alertingPlugin -String notificationsFilePath = "src/test/resources/notifications" -String notificationsCoreFilePath = "src/test/resources/notifications-core" -String notificationsPlugin = "opensearch-notifications-" + plugin_no_snapshot + ".zip" -String notificationsCorePlugin = "opensearch-notifications-core-" + plugin_no_snapshot + ".zip" -String notificationsRemoteFile = "https://ci.opensearch.org/ci/dbc/distribution-build-opensearch/" + opensearch_no_snapshot + "/latest/linux/x64/tar/builds/opensearch/plugins/" + notificationsPlugin -String notificationsCoreRemoteFile = "https://ci.opensearch.org/ci/dbc/distribution-build-opensearch/" + opensearch_no_snapshot + "/latest/linux/x64/tar/builds/opensearch/plugins/" + notificationsCorePlugin testClusters.integTest { testDistribution = 'ARCHIVE' @@ -222,17 +251,9 @@ testClusters.integTest { new RegularFile() { @Override File getAsFile() { - File dir = new File(rootDir.path + "/" + alertingFilePath) - - if (!dir.exists()) { - dir.mkdirs() - } - - File f = new File(dir, alertingPlugin) - if (!f.exists()) { - new URL(alertingRemoteFile).withInputStream{ ins -> f.withOutputStream{ it << ins }} - } - fileTree(alertingFilePath).getSingleFile() + return configurations.zipArchive.asFileTree.matching { + include '**/opensearch-notifications-core*' + }.singleFile } } })) @@ -240,17 +261,9 @@ testClusters.integTest { new RegularFile() { @Override File getAsFile() { - File dir = new File(rootDir.path + "/" + notificationsCoreFilePath) - - if (!dir.exists()) { - dir.mkdirs() - } - - File f = new File(dir, notificationsCorePlugin) - if (!f.exists()) { - new URL(notificationsCoreRemoteFile).withInputStream{ ins -> f.withOutputStream{ it << ins }} - } - fileTree(notificationsCoreFilePath).getSingleFile() + return configurations.zipArchive.asFileTree.matching { + include '**/notifications*' + }.singleFile } } })) @@ -258,17 +271,9 @@ testClusters.integTest { new RegularFile() { @Override File getAsFile() { - File dir = new File(rootDir.path + "/" + notificationsFilePath) - - if (!dir.exists()) { - dir.mkdirs() - } - - File f = new File(dir, notificationsPlugin) - if (!f.exists()) { - new URL(notificationsRemoteFile).withInputStream{ ins -> f.withOutputStream{ it << ins }} - } - fileTree(notificationsFilePath).getSingleFile() + return configurations.zipArchive.asFileTree.matching { + include '**/alerting*' + }.singleFile } } })) @@ -292,13 +297,20 @@ task integTestRemote(type: RestIntegTestTask) { systemProperty 'java.io.tmpdir', opensearch_tmp_dir.absolutePath systemProperty "https", System.getProperty("https") - systemProperty "security", System.getProperty("security") systemProperty "user", System.getProperty("user") systemProperty "password", System.getProperty("password") if (System.getProperty("tests.rest.cluster") != null) { filter { - includeTestsMatching "org.opensearch.securityanalytics.*RestIT" + includeTestsMatching "org.opensearch.securityanalytics.*RestApiIT" + } + } + + if (System.getProperty("https") == null || System.getProperty("https") == "false") { + filter { + excludeTestsMatching "org.opensearch.securityanalytics.resthandler.Secure*RestApiIT" + excludeTestsMatching "org.opensearch.securityanalytics.findings.Secure*RestApiIT" + excludeTestsMatching "org.opensearch.securityanalytics.alerts.Secure*RestApiIT" } } } diff --git a/release-notes/opensearch-security-analytics.release-notes-2.4.0.0.md b/release-notes/opensearch-security-analytics.release-notes-2.4.0.0.md new file mode 100644 index 000000000..fcbf9bc1d --- /dev/null +++ b/release-notes/opensearch-security-analytics.release-notes-2.4.0.0.md @@ -0,0 +1,40 @@ +## Version 2.4.0.0 Release Notes + +Compatible with OpenSearch 2.4.0 +Initial release of `opensearch-security-analytics` plugin + +### Features + +* Sigma Rules, Rule Engine Parser ([#6](https://github.com/opensearch-project/security-analytics/pull/6), [#8](https://github.com/opensearch-project/security-analytics/pull/8), [#26](https://github.com/opensearch-project/security-analytics/pull/26), [#27](https://github.com/opensearch-project/security-analytics/pull/27)) +* Threat Detector Lifecycle Management (CRUD), Pre-packaged/Custom Rule Lifecycle Management (CRUD) ([#32](https://github.com/opensearch-project/security-analytics/pull/32), [#40](https://github.com/opensearch-project/security-analytics/pull/40), [#43](https://github.com/opensearch-project/security-analytics/pull/43), [#48](https://github.com/opensearch-project/security-analytics/pull/48), [#52](https://github.com/opensearch-project/security-analytics/pull/52), [#80](https://github.com/opensearch-project/security-analytics/pull/80)) +* Mapping Logs/Rule fields to ECS(Elastic Common Schema) format ([#30](https://github.com/opensearch-project/security-analytics/pull/30), [#35](https://github.com/opensearch-project/security-analytics/pull/35), [#46](https://github.com/opensearch-project/security-analytics/pull/46), [#46](https://github.com/opensearch-project/security-analytics/pull/46), [#89](https://github.com/opensearch-project/security-analytics/pull/89)) +* Integrate Findings (Lifecycle Management including Rollovers), Triggers, Alerts(Lifecycle Management) ([#39](https://github.com/opensearch-project/security-analytics/pull/39), [#54](https://github.com/opensearch-project/security-analytics/pull/54), [#67](https://github.com/opensearch-project/security-analytics/pull/67), [#70](https://github.com/opensearch-project/security-analytics/pull/70), [#70](https://github.com/opensearch-project/security-analytics/pull/70), [#82](https://github.com/opensearch-project/security-analytics/pull/82)) +* Integrate with Notifications, Acknowledge Alerts ([#71](https://github.com/opensearch-project/security-analytics/pull/71), [#75](https://github.com/opensearch-project/security-analytics/pull/75), [#85](https://github.com/opensearch-project/security-analytics/pull/85)) +* Integrate with Security, implement RBAC, backend roles filtering ([#78](https://github.com/opensearch-project/security-analytics/pull/78)) + +### Enhancements + +* Use of `custom datasources while creating alerting monitors` in `opensearch-security-analytics` ([#34](https://github.com/opensearch-project/security-analytics/pull/34), [#72](https://github.com/opensearch-project/security-analytics/pull/72), [#99](https://github.com/opensearch-project/security-analytics/pull/99)) +* add owner field in monitor to seggregate `opensearch-security-analytics` specific data from `opensearch-alerting` data. ([#110](https://github.com/opensearch-project/security-analytics/pull/110)) + +### Bug Fixes + +* fix bug to support aliasMappings in create mappings api ([#69](https://github.com/opensearch-project/security-analytics/pull/69)) +* fix for multi-node test faiures on rule ingestion ([#76](https://github.com/opensearch-project/security-analytics/pull/76)) +* fix bug on deleting/updating rule when it is not used by detectors ([#77](https://github.com/opensearch-project/security-analytics/pull/77)) +* fix build for delete detector api ([#97](https://github.com/opensearch-project/security-analytics/pull/97)) +* findingsDto assign detectorId bug ([#102](https://github.com/opensearch-project/security-analytics/pull/102)) +* update index monitor method to include namedWriteableRegistry for common utils interface ([#105](https://github.com/opensearch-project/security-analytics/pull/105)) + +### Infrastructure + +* Initial commit for setting up the `opensearch-security-analytics` plugin ([#3](https://github.com/opensearch-project/security-analytics/pull/3)) +* Add support for windows builds ([#84](https://github.com/opensearch-project/security-analytics/pull/84)) +* Add backport workflow in GitHub workflows ([#93](https://github.com/opensearch-project/security-analytics/pull/93), [#113](https://github.com/opensearch-project/security-analytics/pull/113)) +* Change `groupid` in `build.gradle` ([#91](https://github.com/opensearch-project/security-analytics/pull/91)) +* Add `build.sh` to generate `maven artifacts` ([#87](https://github.com/opensearch-project/security-analytics/pull/87)) + +### Documentation + +* Update `README` ([#1](https://github.com/opensearch-project/security-analytics/pull/1)) +* Add `MAINTAINERS.md` file ([#83](https://github.com/opensearch-project/security-analytics/pull/83)) \ No newline at end of file diff --git a/release-notes/opensearch-security-analytics.release-notes-2.4.1.0.md b/release-notes/opensearch-security-analytics.release-notes-2.4.1.0.md new file mode 100644 index 000000000..2a610e54e --- /dev/null +++ b/release-notes/opensearch-security-analytics.release-notes-2.4.1.0.md @@ -0,0 +1,7 @@ +## Version 2.4.1.0 Release Notes + +Compatible with OpenSearch 2.4.1 + +### Bug Fixes + +* fix for running windows integration tests ([#176](https://github.com/opensearch-project/security-analytics/pull/176)) \ No newline at end of file diff --git a/release-notes/opensearch-security-analytics.release-notes-2.5.0.0.md b/release-notes/opensearch-security-analytics.release-notes-2.5.0.0.md new file mode 100644 index 000000000..6a8f6485e --- /dev/null +++ b/release-notes/opensearch-security-analytics.release-notes-2.5.0.0.md @@ -0,0 +1,32 @@ +## Version 2.5.0.0 Release Notes +Compatible with OpenSearch 2.5.0 + +### Maintenance +* Bumped version to 2.5. ([#215](https://github.com/opensearch-project/security-analytics/pull/215)) +* Updated MAINTAINERS.md format. ([#240](https://github.com/opensearch-project/security-analytics/pull/240)) + +### Features +* Implement secure transport action for get alerts and ack alerts. ([#161](https://github.com/opensearch-project/security-analytics/pull/161)) +* GetMappingsView API - index pattern/alias/datastream support. ([#245](https://github.com/opensearch-project/security-analytics/pull/245)) +* Createmappings api index pattern support. ([#260](https://github.com/opensearch-project/security-analytics/pull/260)) + +### Bug Fixes +* Fixed aliases being returned in unmapped_index_fields. ([#147](https://github.com/opensearch-project/security-analytics/pull/147)) +* Fix vulnerability in yaml constructor. ([#198](https://github.com/opensearch-project/security-analytics/pull/198)) +* Fix flaky integration tests for security analytics. ([#241](https://github.com/opensearch-project/security-analytics/pull/241)) +* Fixed SecureFindingRestApiIT. Removed uppercasing of the detector type. ([#247](https://github.com/opensearch-project/security-analytics/pull/247)) +* Fix ci builds for security-analytics. ([#253](https://github.com/opensearch-project/security-analytics/pull/253)) + +### Refactoring +* Search returns detector type in CAPS fix and integration tests. ([#174](https://github.com/opensearch-project/security-analytics/pull/174)) +* Added dummy search when creating detector on the given indices. ([#197](https://github.com/opensearch-project/security-analytics/pull/197)) +* Updated network mappings. ([#211](https://github.com/opensearch-project/security-analytics/pull/211)) +* Updated windows mappings. ([#212](https://github.com/opensearch-project/security-analytics/pull/212)) +* Updated ad_ldap mappings. ([#213](https://github.com/opensearch-project/security-analytics/pull/213)) +* Removed create/delete queryIndex. ([#215](https://github.com/opensearch-project/security-analytics/pull/215)) +* Update Linux mappings. ([#223](https://github.com/opensearch-project/security-analytics/pull/223)) +* Changes to return empty search response for custom rules. ([#231](https://github.com/opensearch-project/security-analytics/pull/231)) +* Service Returns Unhandled Error Response. ([#248](https://github.com/opensearch-project/security-analytics/pull/248)) + +### Documentation +* Added 2.5 release notes. ([#268](https://github.com/opensearch-project/security-analytics/pull/268)) \ No newline at end of file diff --git a/release-notes/opensearch-security-analytics.release-notes-2.6.0.0.md b/release-notes/opensearch-security-analytics.release-notes-2.6.0.0.md new file mode 100644 index 000000000..560025f1e --- /dev/null +++ b/release-notes/opensearch-security-analytics.release-notes-2.6.0.0.md @@ -0,0 +1,31 @@ +## Version 2.6.0.0 Release Notes + +Compatible with OpenSearch 2.6.0 + +### Features +* GetIndexMappings index pattern support. ([#265](https://github.com/opensearch-project/security-analytics/pull/265)) +* Added API to fetch all log types/rule categories. ([#327](https://github.com/opensearch-project/security-analytics/pull/327)) + +### Enhancement +* Adds timestamp field alias and sets time range filter in bucket level monitor. ([#262](https://github.com/opensearch-project/security-analytics/pull/262)) +* Update others_application mappings. ([#277](https://github.com/opensearch-project/security-analytics/pull/277)) +* Update others_apt mappings. ([#278](https://github.com/opensearch-project/security-analytics/pull/278)) +* Index template conflict resolve; GetIndexMappings API changes. ([#283](https://github.com/opensearch-project/security-analytics/pull/283)) +* Add nesting level to yaml constructor. ([#286](https://github.com/opensearch-project/security-analytics/pull/286)) +* Update others_cloud mappings. ([#301](https://github.com/opensearch-project/security-analytics/pull/301)) +* Update others_compliance mappings. ([#302](https://github.com/opensearch-project/security-analytics/pull/302)) +* Update others_web mappings. ([#304](https://github.com/opensearch-project/security-analytics/pull/304)) +* Log message change for debugging. ([#321](https://github.com/opensearch-project/security-analytics/pull/321)) + +### Bug Fixes +* Service Returns Unhandled Error Response. ([#248](https://github.com/opensearch-project/security-analytics/pull/248)) +* Correct linux mapping error. ([#263](https://github.com/opensearch-project/security-analytics/pull/263)) +* GetIndexMapping API timestamp alias bugfix. ([#293](https://github.com/opensearch-project/security-analytics/pull/293)) +* Query_field_names bugfix. ([#335](https://github.com/opensearch-project/security-analytics/pull/335)) + +### Maintenance +* Baselined MAINTAINERS and CODEOWNERS docs. ([#329](https://github.com/opensearch-project/security-analytics/pull/329)) +* Bumped version to 2.6. ([#351](https://github.com/opensearch-project/security-analytics/pull/351)) + +### Documentation +* Added 2.6 release notes. ([#353](https://github.com/opensearch-project/security-analytics/pull/353)) \ No newline at end of file diff --git a/release-notes/opensearch-security-analytics.release-notes-2.7.0.0.md b/release-notes/opensearch-security-analytics.release-notes-2.7.0.0.md new file mode 100644 index 000000000..3b998624c --- /dev/null +++ b/release-notes/opensearch-security-analytics.release-notes-2.7.0.0.md @@ -0,0 +1,21 @@ +## Version 2.7.0.0 2023-04-17 + +Compatible with OpenSearch 2.7.0 + +### Features +* New log types. ([#332](https://github.com/opensearch-project/security-analytics/pull/332)) +* Support for multiple indices in detector input. ([#336](https://github.com/opensearch-project/security-analytics/pull/336)) + +### Refactoring +* Index template cleanup. ([#317](https://github.com/opensearch-project/security-analytics/pull/317)) +* Handle monitor or monitor index not found during detector deletion. ([#384](https://github.com/opensearch-project/security-analytics/pull/384)) +* Handle index not exists for detector search and delete. ([#396](https://github.com/opensearch-project/security-analytics/pull/396)) + +### Bug Fixes +* Fix for integ test failures. ([#363](https://github.com/opensearch-project/security-analytics/pull/363)) + +### Maintenance +* Bumped version to 2.7. ([#387](https://github.com/opensearch-project/security-analytics/pull/387)) + +### Documentation +* Added 2.7 release notes. ([#]()) \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java index e0a974740..4d5194eec 100644 --- a/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java +++ b/src/main/java/org/opensearch/securityanalytics/SecurityAnalyticsPlugin.java @@ -5,7 +5,10 @@ package org.opensearch.securityanalytics; import java.util.Collection; +import java.util.Collections; import java.util.List; +import java.util.Map; +import java.util.Optional; import java.util.function.Supplier; import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionResponse; @@ -13,75 +16,52 @@ import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.node.DiscoveryNodes; import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.component.LifecycleComponent; import org.opensearch.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.IndexScopedSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.common.settings.SettingsFilter; -import org.opensearch.common.xcontent.NamedXContentRegistry; +import org.opensearch.commons.alerting.action.AlertingActions; +import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.Environment; import org.opensearch.env.NodeEnvironment; +import org.opensearch.index.IndexSettings; +import org.opensearch.index.codec.CodecServiceFactory; +import org.opensearch.index.engine.EngineFactory; +import org.opensearch.index.mapper.Mapper; import org.opensearch.plugins.ActionPlugin; +import org.opensearch.plugins.EnginePlugin; +import org.opensearch.plugins.MapperPlugin; import org.opensearch.plugins.Plugin; +import org.opensearch.plugins.SearchPlugin; import org.opensearch.repositories.RepositoriesService; import org.opensearch.rest.RestController; import org.opensearch.rest.RestHandler; import org.opensearch.script.ScriptService; -import org.opensearch.securityanalytics.action.AckAlertsAction; -import org.opensearch.securityanalytics.action.CreateIndexMappingsAction; -import org.opensearch.securityanalytics.action.DeleteDetectorAction; -import org.opensearch.securityanalytics.action.GetAlertsAction; -import org.opensearch.securityanalytics.action.GetDetectorAction; -import org.opensearch.securityanalytics.action.GetFindingsAction; -import org.opensearch.securityanalytics.action.GetIndexMappingsAction; -import org.opensearch.securityanalytics.action.GetMappingsViewAction; -import org.opensearch.securityanalytics.action.IndexDetectorAction; -import org.opensearch.securityanalytics.action.SearchDetectorAction; -import org.opensearch.securityanalytics.action.UpdateIndexMappingsAction; +import org.opensearch.securityanalytics.action.*; +import org.opensearch.securityanalytics.correlation.index.codec.CorrelationCodecService; +import org.opensearch.securityanalytics.correlation.index.mapper.CorrelationVectorFieldMapper; +import org.opensearch.securityanalytics.correlation.index.query.CorrelationQueryBuilder; +import org.opensearch.securityanalytics.indexmanagment.DetectorIndexManagementService; +import org.opensearch.securityanalytics.mapper.IndexTemplateManager; import org.opensearch.securityanalytics.mapper.MapperService; -import org.opensearch.securityanalytics.resthandler.RestAcknowledgeAlertsAction; -import org.opensearch.securityanalytics.resthandler.RestGetFindingsAction; -import org.opensearch.securityanalytics.transport.TransportAcknowledgeAlertsAction; -import org.opensearch.securityanalytics.transport.TransportCreateIndexMappingsAction; -import org.opensearch.securityanalytics.transport.TransportGetFindingsAction; -import org.opensearch.securityanalytics.action.DeleteRuleAction; -import org.opensearch.securityanalytics.action.IndexRuleAction; -import org.opensearch.securityanalytics.action.SearchRuleAction; +import org.opensearch.securityanalytics.resthandler.*; +import org.opensearch.securityanalytics.transport.*; import org.opensearch.securityanalytics.model.Rule; -import org.opensearch.securityanalytics.resthandler.RestDeleteDetectorAction; -import org.opensearch.securityanalytics.resthandler.RestDeleteRuleAction; -import org.opensearch.securityanalytics.resthandler.RestIndexRuleAction; -import org.opensearch.securityanalytics.resthandler.RestSearchRuleAction; -import org.opensearch.securityanalytics.transport.TransportDeleteRuleAction; -import org.opensearch.securityanalytics.transport.TransportIndexRuleAction; -import org.opensearch.securityanalytics.transport.TransportSearchRuleAction; -import org.opensearch.securityanalytics.transport.TransportUpdateIndexMappingsAction; -import org.opensearch.securityanalytics.transport.TransportGetIndexMappingsAction; import org.opensearch.securityanalytics.model.Detector; import org.opensearch.securityanalytics.model.DetectorInput; -import org.opensearch.securityanalytics.resthandler.RestCreateIndexMappingsAction; -import org.opensearch.securityanalytics.resthandler.RestGetAlertsAction; -import org.opensearch.securityanalytics.resthandler.RestGetDetectorAction; -import org.opensearch.securityanalytics.resthandler.RestGetIndexMappingsAction; -import org.opensearch.securityanalytics.resthandler.RestGetMappingsViewAction; -import org.opensearch.securityanalytics.resthandler.RestIndexDetectorAction; -import org.opensearch.securityanalytics.resthandler.RestSearchDetectorAction; -import org.opensearch.securityanalytics.resthandler.RestUpdateIndexMappingsAction; import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; -import org.opensearch.securityanalytics.transport.TransportDeleteDetectorAction; -import org.opensearch.securityanalytics.transport.TransportGetAlertsAction; -import org.opensearch.securityanalytics.transport.TransportGetDetectorAction; -import org.opensearch.securityanalytics.transport.TransportGetMappingsViewAction; -import org.opensearch.securityanalytics.transport.TransportIndexDetectorAction; -import org.opensearch.securityanalytics.transport.TransportSearchDetectorAction; +import org.opensearch.securityanalytics.util.CorrelationIndices; +import org.opensearch.securityanalytics.util.CorrelationRuleIndices; import org.opensearch.securityanalytics.util.DetectorIndices; import org.opensearch.securityanalytics.util.RuleIndices; import org.opensearch.securityanalytics.util.RuleTopicIndices; import org.opensearch.threadpool.ThreadPool; import org.opensearch.watcher.ResourceWatcherService; -public class SecurityAnalyticsPlugin extends Plugin implements ActionPlugin { +public class SecurityAnalyticsPlugin extends Plugin implements ActionPlugin, MapperPlugin, SearchPlugin, EnginePlugin { public static final String PLUGINS_BASE_URI = "/_plugins/_security_analytics"; public static final String MAPPER_BASE_URI = PLUGINS_BASE_URI + "/mappings"; @@ -90,15 +70,26 @@ public class SecurityAnalyticsPlugin extends Plugin implements ActionPlugin { public static final String ALERTS_BASE_URI = PLUGINS_BASE_URI + "/alerts"; public static final String DETECTOR_BASE_URI = PLUGINS_BASE_URI + "/detectors"; public static final String RULE_BASE_URI = PLUGINS_BASE_URI + "/rules"; + public static final String FINDINGS_CORRELATE_URI = FINDINGS_BASE_URI + "/correlate"; + public static final String LIST_CORRELATIONS_URI = PLUGINS_BASE_URI + "/correlations"; + public static final String CORRELATION_RULES_BASE_URI = PLUGINS_BASE_URI + "/correlation/rules"; + + private CorrelationRuleIndices correlationRuleIndices; private DetectorIndices detectorIndices; private RuleTopicIndices ruleTopicIndices; + private CorrelationIndices correlationIndices; + private MapperService mapperService; private RuleIndices ruleIndices; + private DetectorIndexManagementService detectorIndexManagementService; + + private IndexTemplateManager indexTemplateManager; + @Override public Collection createComponents(Client client, ClusterService clusterService, @@ -113,9 +104,18 @@ public Collection createComponents(Client client, Supplier repositoriesServiceSupplier) { detectorIndices = new DetectorIndices(client.admin(), clusterService, threadPool); ruleTopicIndices = new RuleTopicIndices(client, clusterService); - mapperService = new MapperService(client.admin().indices()); + correlationIndices = new CorrelationIndices(client, clusterService); + indexTemplateManager = new IndexTemplateManager(client, clusterService, indexNameExpressionResolver, xContentRegistry); + mapperService = new MapperService(client, clusterService, indexNameExpressionResolver, indexTemplateManager); ruleIndices = new RuleIndices(client, clusterService, threadPool); - return List.of(detectorIndices, ruleTopicIndices, ruleIndices, mapperService); + correlationRuleIndices = new CorrelationRuleIndices(client, clusterService); + + return List.of(detectorIndices, correlationIndices, correlationRuleIndices, ruleTopicIndices, ruleIndices, mapperService, indexTemplateManager); + } + + @Override + public Collection> getGuiceServiceClasses() { + return Collections.singletonList(DetectorIndexManagementService.class); } @Override @@ -140,7 +140,14 @@ public List getRestHandlers(Settings settings, new RestGetAlertsAction(), new RestIndexRuleAction(), new RestSearchRuleAction(), - new RestDeleteRuleAction() + new RestDeleteRuleAction(), + new RestValidateRulesAction(), + new RestGetAllRuleCategoriesAction(), + new RestSearchCorrelationAction(), + new RestIndexCorrelationRuleAction(), + new RestDeleteCorrelationRuleAction(), + new RestListCorrelationAction(), + new RestSearchCorrelationRuleAction() ); } @@ -153,10 +160,51 @@ public List getNamedXContent() { ); } + @Override + public Map getMappers() { + return Collections.singletonMap( + CorrelationVectorFieldMapper.CONTENT_TYPE, + new CorrelationVectorFieldMapper.TypeParser() + ); + } + + @Override + public Optional getEngineFactory(IndexSettings indexSettings) { + return Optional.empty(); + } + + @Override + public Optional getCustomCodecServiceFactory(IndexSettings indexSettings) { + if (indexSettings.getValue(SecurityAnalyticsSettings.IS_CORRELATION_INDEX_SETTING)) { + return Optional.of(CorrelationCodecService::new); + } + return Optional.empty(); + } + + @Override + public List> getQueries() { + return Collections.singletonList(new QuerySpec<>(CorrelationQueryBuilder.NAME, CorrelationQueryBuilder::new, CorrelationQueryBuilder::fromXContent)); + } + @Override public List> getSettings() { return List.of( - SecurityAnalyticsSettings.INDEX_TIMEOUT + SecurityAnalyticsSettings.INDEX_TIMEOUT, + SecurityAnalyticsSettings.FILTER_BY_BACKEND_ROLES, + SecurityAnalyticsSettings.ALERT_HISTORY_ENABLED, + SecurityAnalyticsSettings.ALERT_HISTORY_ROLLOVER_PERIOD, + SecurityAnalyticsSettings.ALERT_HISTORY_INDEX_MAX_AGE, + SecurityAnalyticsSettings.ALERT_HISTORY_MAX_DOCS, + SecurityAnalyticsSettings.ALERT_HISTORY_RETENTION_PERIOD, + SecurityAnalyticsSettings.REQUEST_TIMEOUT, + SecurityAnalyticsSettings.MAX_ACTION_THROTTLE_VALUE, + SecurityAnalyticsSettings.FINDING_HISTORY_ENABLED, + SecurityAnalyticsSettings.FINDING_HISTORY_MAX_DOCS, + SecurityAnalyticsSettings.FINDING_HISTORY_INDEX_MAX_AGE, + SecurityAnalyticsSettings.FINDING_HISTORY_ROLLOVER_PERIOD, + SecurityAnalyticsSettings.FINDING_HISTORY_RETENTION_PERIOD, + SecurityAnalyticsSettings.IS_CORRELATION_INDEX_SETTING, + SecurityAnalyticsSettings.CORRELATION_TIME_WINDOW ); } @@ -176,7 +224,15 @@ public List> getSettings() { new ActionPlugin.ActionHandler<>(GetAlertsAction.INSTANCE, TransportGetAlertsAction.class), new ActionPlugin.ActionHandler<>(IndexRuleAction.INSTANCE, TransportIndexRuleAction.class), new ActionPlugin.ActionHandler<>(SearchRuleAction.INSTANCE, TransportSearchRuleAction.class), - new ActionPlugin.ActionHandler<>(DeleteRuleAction.INSTANCE, TransportDeleteRuleAction.class) + new ActionPlugin.ActionHandler<>(DeleteRuleAction.INSTANCE, TransportDeleteRuleAction.class), + new ActionPlugin.ActionHandler<>(ValidateRulesAction.INSTANCE, TransportValidateRulesAction.class), + new ActionPlugin.ActionHandler<>(GetAllRuleCategoriesAction.INSTANCE, TransportGetAllRuleCategoriesAction.class), + new ActionPlugin.ActionHandler<>(CorrelatedFindingAction.INSTANCE, TransportSearchCorrelationAction.class), + new ActionPlugin.ActionHandler<>(IndexCorrelationRuleAction.INSTANCE, TransportIndexCorrelationRuleAction.class), + new ActionPlugin.ActionHandler<>(DeleteCorrelationRuleAction.INSTANCE, TransportDeleteCorrelationRuleAction.class), + new ActionPlugin.ActionHandler<>(AlertingActions.SUBSCRIBE_FINDINGS_ACTION_TYPE, TransportCorrelateFindingAction.class), + new ActionPlugin.ActionHandler<>(ListCorrelationsAction.INSTANCE, TransportListCorrelationAction.class), + new ActionPlugin.ActionHandler<>(SearchCorrelationRuleAction.INSTANCE, TransportSearchCorrelationRuleAction.class) ); } } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/action/AckAlertsAction.java b/src/main/java/org/opensearch/securityanalytics/action/AckAlertsAction.java index 212d2c815..3c0ed0ad9 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/AckAlertsAction.java +++ b/src/main/java/org/opensearch/securityanalytics/action/AckAlertsAction.java @@ -6,8 +6,11 @@ import org.opensearch.action.ActionType; +/** + * Acknowledge Alert Action + */ public class AckAlertsAction extends ActionType { - public static final String NAME = "cluster:admin/opendistro/securityanalytics/alerts/ack"; + public static final String NAME = "cluster:admin/opensearch/securityanalytics/alerts/ack"; public static final AckAlertsAction INSTANCE = new AckAlertsAction(); public AckAlertsAction() { diff --git a/src/main/java/org/opensearch/securityanalytics/action/AckAlertsRequest.java b/src/main/java/org/opensearch/securityanalytics/action/AckAlertsRequest.java index 13fb962d7..09e0dfc61 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/AckAlertsRequest.java +++ b/src/main/java/org/opensearch/securityanalytics/action/AckAlertsRequest.java @@ -9,8 +9,8 @@ import org.opensearch.action.ValidateActions; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentBuilder; import java.io.IOException; import java.util.Collections; diff --git a/src/main/java/org/opensearch/securityanalytics/action/AckAlertsResponse.java b/src/main/java/org/opensearch/securityanalytics/action/AckAlertsResponse.java index 834f7897c..d83bec12c 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/AckAlertsResponse.java +++ b/src/main/java/org/opensearch/securityanalytics/action/AckAlertsResponse.java @@ -7,8 +7,8 @@ import org.opensearch.action.ActionResponse; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; import java.io.IOException; import java.util.Collections; diff --git a/src/main/java/org/opensearch/securityanalytics/action/AlertDto.java b/src/main/java/org/opensearch/securityanalytics/action/AlertDto.java index 1adf8e657..f2b46cdb7 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/AlertDto.java +++ b/src/main/java/org/opensearch/securityanalytics/action/AlertDto.java @@ -11,12 +11,12 @@ import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; import org.opensearch.common.lucene.uid.Versions; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.commons.alerting.alerts.AlertError; import org.opensearch.commons.alerting.model.ActionExecutionResult; import org.opensearch.commons.alerting.model.AggregationResultBucket; import org.opensearch.commons.alerting.model.Alert; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; public class AlertDto implements ToXContentObject, Writeable { diff --git a/src/main/java/org/opensearch/securityanalytics/action/CorrelatedFindingAction.java b/src/main/java/org/opensearch/securityanalytics/action/CorrelatedFindingAction.java new file mode 100644 index 000000000..f41bdc4aa --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/action/CorrelatedFindingAction.java @@ -0,0 +1,16 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.action; + +import org.opensearch.action.ActionType; + +public class CorrelatedFindingAction extends ActionType { + public static final CorrelatedFindingAction INSTANCE = new CorrelatedFindingAction(); + public static final String NAME = "cluster:admin/opensearch/securityanalytics/correlations/findings"; + + public CorrelatedFindingAction() { + super(NAME, CorrelatedFindingResponse::new); + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/action/CorrelatedFindingRequest.java b/src/main/java/org/opensearch/securityanalytics/action/CorrelatedFindingRequest.java new file mode 100644 index 000000000..bbc22a9ac --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/action/CorrelatedFindingRequest.java @@ -0,0 +1,70 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.action; + +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.securityanalytics.model.Detector; + +import java.io.IOException; + +public class CorrelatedFindingRequest extends ActionRequest { + + private Detector.DetectorType detectorType; + + private String findingId; + + private long timeWindow; + + private int noOfNearbyFindings; + + public CorrelatedFindingRequest(String findingId, Detector.DetectorType detectorType, long timeWindow, int noOfNearbyFindings) { + super(); + this.findingId = findingId; + this.detectorType = detectorType; + this.timeWindow = timeWindow; + this.noOfNearbyFindings = noOfNearbyFindings; + } + + public CorrelatedFindingRequest(StreamInput sin) throws IOException { + this( + sin.readString(), + sin.readEnum(Detector.DetectorType.class), + sin.readLong(), + sin.readInt() + ); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(findingId); + out.writeEnum(detectorType); + out.writeLong(timeWindow); + out.writeInt(noOfNearbyFindings); + } + + public String getFindingId() { + return findingId; + } + + public Detector.DetectorType getDetectorType() { + return detectorType; + } + + public long getTimeWindow() { + return timeWindow; + } + + public int getNoOfNearbyFindings() { + return noOfNearbyFindings; + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/action/CorrelatedFindingResponse.java b/src/main/java/org/opensearch/securityanalytics/action/CorrelatedFindingResponse.java new file mode 100644 index 000000000..3e4dd7eb9 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/action/CorrelatedFindingResponse.java @@ -0,0 +1,47 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.action; + +import org.opensearch.action.ActionResponse; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.securityanalytics.model.FindingWithScore; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; + +public class CorrelatedFindingResponse extends ActionResponse implements ToXContentObject { + + private List findings; + + protected static final String FINDINGS = "findings"; + + public CorrelatedFindingResponse(List findings) { + super(); + this.findings = findings; + } + + public CorrelatedFindingResponse(StreamInput sin) throws IOException { + this( + Collections.unmodifiableList(sin.readList(FindingWithScore::new)) + ); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(findings); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject() + .field(FINDINGS, findings) + .endObject(); + return builder; + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/action/CreateIndexMappingsAction.java b/src/main/java/org/opensearch/securityanalytics/action/CreateIndexMappingsAction.java index c7c0fc8c4..9ddf48b66 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/CreateIndexMappingsAction.java +++ b/src/main/java/org/opensearch/securityanalytics/action/CreateIndexMappingsAction.java @@ -9,7 +9,7 @@ public class CreateIndexMappingsAction extends ActionType{ - public static final String NAME = "cluster:admin/opendistro/securityanalytics/mapping/create"; + public static final String NAME = "cluster:admin/opensearch/securityanalytics/mapping/create"; public static final CreateIndexMappingsAction INSTANCE = new CreateIndexMappingsAction(); diff --git a/src/main/java/org/opensearch/securityanalytics/action/CreateIndexMappingsRequest.java b/src/main/java/org/opensearch/securityanalytics/action/CreateIndexMappingsRequest.java index e7e69e9dd..a0a9ffefc 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/CreateIndexMappingsRequest.java +++ b/src/main/java/org/opensearch/securityanalytics/action/CreateIndexMappingsRequest.java @@ -11,11 +11,11 @@ import org.opensearch.common.Strings; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; import java.io.IOException; import java.util.HashMap; diff --git a/src/main/java/org/opensearch/securityanalytics/action/DeleteCorrelationRuleAction.java b/src/main/java/org/opensearch/securityanalytics/action/DeleteCorrelationRuleAction.java new file mode 100644 index 000000000..b0f905ad3 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/action/DeleteCorrelationRuleAction.java @@ -0,0 +1,22 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.securityanalytics.action; + +import org.opensearch.action.ActionType; +import org.opensearch.action.support.master.AcknowledgedResponse; + +public class DeleteCorrelationRuleAction extends ActionType { + + public static final DeleteCorrelationRuleAction INSTANCE = new DeleteCorrelationRuleAction(); + public static final String NAME = "cluster:admin/index/correlation/rules/delete"; + + private DeleteCorrelationRuleAction() { + super(NAME, AcknowledgedResponse::new); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/action/DeleteCorrelationRuleRequest.java b/src/main/java/org/opensearch/securityanalytics/action/DeleteCorrelationRuleRequest.java new file mode 100644 index 000000000..3bae730c4 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/action/DeleteCorrelationRuleRequest.java @@ -0,0 +1,61 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.securityanalytics.action; + +import java.io.IOException; +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.action.ValidateActions; +import org.opensearch.action.support.WriteRequest; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.rest.RestRequest; +import org.opensearch.securityanalytics.model.CorrelationRule; + +public class DeleteCorrelationRuleRequest extends ActionRequest { + + private String correlationRuleId; + private WriteRequest.RefreshPolicy refreshPolicy; + + public DeleteCorrelationRuleRequest(String correlationRuleId, WriteRequest.RefreshPolicy refreshPolicy) { + super(); + this.correlationRuleId = correlationRuleId; + this.refreshPolicy = refreshPolicy; + } + + public DeleteCorrelationRuleRequest(StreamInput sin) throws IOException { + this(sin.readString(), WriteRequest.RefreshPolicy.readFrom(sin)); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (correlationRuleId == null) { + validationException = ValidateActions.addValidationError("Correlation Rule Id is mandatory!", validationException); + } + if (refreshPolicy == null) { + validationException = ValidateActions.addValidationError("RefreshPolicy is mandatory!", validationException); + } + return validationException; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(correlationRuleId); + refreshPolicy.writeTo(out); + } + + public String getCorrelationRuleId() { + return correlationRuleId; + } + + public WriteRequest.RefreshPolicy getRefreshPolicy() { + return refreshPolicy; + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/action/DeleteDetectorAction.java b/src/main/java/org/opensearch/securityanalytics/action/DeleteDetectorAction.java index ffc87b8c2..c28e1673e 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/DeleteDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/action/DeleteDetectorAction.java @@ -9,7 +9,7 @@ public class DeleteDetectorAction extends ActionType { public static final DeleteDetectorAction INSTANCE = new DeleteDetectorAction(); - public static final String NAME = "cluster:admin/opendistro/securityanalytics/detector/delete"; + public static final String NAME = "cluster:admin/opensearch/securityanalytics/detector/delete"; public DeleteDetectorAction() { super(NAME, DeleteDetectorResponse::new); diff --git a/src/main/java/org/opensearch/securityanalytics/action/DeleteDetectorResponse.java b/src/main/java/org/opensearch/securityanalytics/action/DeleteDetectorResponse.java index bab600d64..4981cbc47 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/DeleteDetectorResponse.java +++ b/src/main/java/org/opensearch/securityanalytics/action/DeleteDetectorResponse.java @@ -7,8 +7,8 @@ import org.opensearch.action.ActionResponse; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.rest.RestStatus; import java.io.IOException; diff --git a/src/main/java/org/opensearch/securityanalytics/action/DeleteRuleAction.java b/src/main/java/org/opensearch/securityanalytics/action/DeleteRuleAction.java index 1092f5e41..ac57e01e3 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/DeleteRuleAction.java +++ b/src/main/java/org/opensearch/securityanalytics/action/DeleteRuleAction.java @@ -9,7 +9,7 @@ public class DeleteRuleAction extends ActionType { public static final DeleteRuleAction INSTANCE = new DeleteRuleAction(); - public static final String NAME = "cluster:admin/opendistro/securityanalytics/rule/delete"; + public static final String NAME = "cluster:admin/opensearch/securityanalytics/rule/delete"; public DeleteRuleAction() { super(NAME, DeleteRuleResponse::new); diff --git a/src/main/java/org/opensearch/securityanalytics/action/DeleteRuleResponse.java b/src/main/java/org/opensearch/securityanalytics/action/DeleteRuleResponse.java index 2169b2346..ffeb6bf4f 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/DeleteRuleResponse.java +++ b/src/main/java/org/opensearch/securityanalytics/action/DeleteRuleResponse.java @@ -7,8 +7,8 @@ import org.opensearch.action.ActionResponse; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.rest.RestStatus; import java.io.IOException; diff --git a/src/main/java/org/opensearch/securityanalytics/action/FindingDto.java b/src/main/java/org/opensearch/securityanalytics/action/FindingDto.java index 6e109aef8..f94a08c81 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/FindingDto.java +++ b/src/main/java/org/opensearch/securityanalytics/action/FindingDto.java @@ -10,12 +10,12 @@ import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.commons.alerting.model.DocLevelQuery; import org.opensearch.commons.alerting.model.FindingDocument; import org.opensearch.commons.alerting.model.FindingWithDocs; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; public class FindingDto implements ToXContentObject, Writeable { @@ -45,7 +45,7 @@ public FindingDto( Instant timestamp, List documents ) { - this.detectorId = id; + this.detectorId = detectorId; this.id = id; this.relatedDocIds = relatedDocIds; this.index = index; @@ -89,4 +89,31 @@ public void writeTo(StreamOutput out) throws IOException { out.writeList(documents); } + public String getId() { + return id; + } + + public List getRelatedDocIds() { + return relatedDocIds; + } + + public String getIndex() { + return index; + } + + public List getDocLevelQueries() { + return docLevelQueries; + } + + public Instant getTimestamp() { + return timestamp; + } + + public List getDocuments() { + return documents; + } + + public String getDetectorId() { + return detectorId; + } } diff --git a/src/main/java/org/opensearch/securityanalytics/action/GetAlertsAction.java b/src/main/java/org/opensearch/securityanalytics/action/GetAlertsAction.java index 1d78ab0a2..df9422a77 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/GetAlertsAction.java +++ b/src/main/java/org/opensearch/securityanalytics/action/GetAlertsAction.java @@ -9,7 +9,7 @@ public class GetAlertsAction extends ActionType { public static final GetAlertsAction INSTANCE = new GetAlertsAction(); - public static final String NAME = "cluster:admin/opendistro/securityanalytics/alerts/get"; + public static final String NAME = "cluster:admin/opensearch/securityanalytics/alerts/get"; public GetAlertsAction() { super(NAME, GetAlertsResponse::new); diff --git a/src/main/java/org/opensearch/securityanalytics/action/GetAlertsResponse.java b/src/main/java/org/opensearch/securityanalytics/action/GetAlertsResponse.java index 78bf88064..7b0f6e01b 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/GetAlertsResponse.java +++ b/src/main/java/org/opensearch/securityanalytics/action/GetAlertsResponse.java @@ -11,10 +11,10 @@ import org.opensearch.action.ActionResponse; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.commons.alerting.model.Alert; import org.opensearch.commons.alerting.model.FindingWithDocs; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.rest.RestStatus; import org.opensearch.securityanalytics.model.Detector; diff --git a/src/main/java/org/opensearch/securityanalytics/action/GetAllRuleCategoriesAction.java b/src/main/java/org/opensearch/securityanalytics/action/GetAllRuleCategoriesAction.java new file mode 100644 index 000000000..3eae5ac31 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/action/GetAllRuleCategoriesAction.java @@ -0,0 +1,17 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.action; + +import org.opensearch.action.ActionType; + +public class GetAllRuleCategoriesAction extends ActionType { + + public static final GetAllRuleCategoriesAction INSTANCE = new GetAllRuleCategoriesAction(); + public static final String NAME = "cluster:admin/opensearch/securityanalytics/rules/categories"; + + public GetAllRuleCategoriesAction() { + super(NAME, GetAllRuleCategoriesResponse::new); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/action/GetAllRuleCategoriesRequest.java b/src/main/java/org/opensearch/securityanalytics/action/GetAllRuleCategoriesRequest.java new file mode 100644 index 000000000..241d7a066 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/action/GetAllRuleCategoriesRequest.java @@ -0,0 +1,32 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.action; + +import java.io.IOException; +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; + +public class GetAllRuleCategoriesRequest extends ActionRequest { + + public GetAllRuleCategoriesRequest() { + super(); + } + public GetAllRuleCategoriesRequest(StreamInput sin) throws IOException { + this(); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + + } + +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/action/GetAllRuleCategoriesResponse.java b/src/main/java/org/opensearch/securityanalytics/action/GetAllRuleCategoriesResponse.java new file mode 100644 index 000000000..c182fac48 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/action/GetAllRuleCategoriesResponse.java @@ -0,0 +1,46 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.action; + +import java.io.IOException; +import java.util.List; +import org.opensearch.action.ActionResponse; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.securityanalytics.model.RuleCategory; + +public class GetAllRuleCategoriesResponse extends ActionResponse implements ToXContentObject { + + private static final String RULE_CATEGORIES = "rule_categories"; + + private List ruleCategories; + + public GetAllRuleCategoriesResponse(List ruleCategories) { + super(); + this.ruleCategories = ruleCategories; + } + + public GetAllRuleCategoriesResponse(StreamInput sin) throws IOException { + this(sin.readList(RuleCategory::new)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeList(ruleCategories); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startArray(RULE_CATEGORIES); + for (RuleCategory c : ruleCategories) { + c.toXContent(builder, null); + } + builder.endArray(); + return builder.endObject(); + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/action/GetDetectorAction.java b/src/main/java/org/opensearch/securityanalytics/action/GetDetectorAction.java index bb6510ca7..2841f6dab 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/GetDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/action/GetDetectorAction.java @@ -9,7 +9,7 @@ public class GetDetectorAction extends ActionType { public static final GetDetectorAction INSTANCE = new GetDetectorAction(); - public static final String NAME = "cluster:admin/opendistro/securityanalytics/detector/get"; + public static final String NAME = "cluster:admin/opensearch/securityanalytics/detector/get"; public GetDetectorAction() { super(NAME, GetDetectorResponse::new); diff --git a/src/main/java/org/opensearch/securityanalytics/action/GetDetectorRequest.java b/src/main/java/org/opensearch/securityanalytics/action/GetDetectorRequest.java index ee5a10b66..15743690a 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/GetDetectorRequest.java +++ b/src/main/java/org/opensearch/securityanalytics/action/GetDetectorRequest.java @@ -8,8 +8,8 @@ import org.opensearch.action.ActionRequestValidationException; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.XContentParser; import java.io.IOException; import java.util.Locale; diff --git a/src/main/java/org/opensearch/securityanalytics/action/GetDetectorResponse.java b/src/main/java/org/opensearch/securityanalytics/action/GetDetectorResponse.java index 6e166d5e5..318429bb6 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/GetDetectorResponse.java +++ b/src/main/java/org/opensearch/securityanalytics/action/GetDetectorResponse.java @@ -7,8 +7,8 @@ import org.opensearch.action.ActionResponse; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.rest.RestStatus; import org.opensearch.securityanalytics.model.Detector; diff --git a/src/main/java/org/opensearch/securityanalytics/action/GetFindingsAction.java b/src/main/java/org/opensearch/securityanalytics/action/GetFindingsAction.java index 7cb0c9415..8eb76ee01 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/GetFindingsAction.java +++ b/src/main/java/org/opensearch/securityanalytics/action/GetFindingsAction.java @@ -9,7 +9,7 @@ public class GetFindingsAction extends ActionType { public static final GetFindingsAction INSTANCE = new GetFindingsAction(); - public static final String NAME = "cluster:admin/opendistro/securityanalytics/findings/get"; + public static final String NAME = "cluster:admin/opensearch/securityanalytics/findings/get"; public GetFindingsAction() { super(NAME, GetFindingsResponse::new); diff --git a/src/main/java/org/opensearch/securityanalytics/action/GetFindingsResponse.java b/src/main/java/org/opensearch/securityanalytics/action/GetFindingsResponse.java index 82177b99e..3bc483b91 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/GetFindingsResponse.java +++ b/src/main/java/org/opensearch/securityanalytics/action/GetFindingsResponse.java @@ -11,9 +11,9 @@ import org.opensearch.action.ActionResponse; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.commons.alerting.model.FindingWithDocs; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.rest.RestStatus; import org.opensearch.securityanalytics.model.Detector; diff --git a/src/main/java/org/opensearch/securityanalytics/action/GetIndexMappingsAction.java b/src/main/java/org/opensearch/securityanalytics/action/GetIndexMappingsAction.java index 11adb0e5c..8177bda10 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/GetIndexMappingsAction.java +++ b/src/main/java/org/opensearch/securityanalytics/action/GetIndexMappingsAction.java @@ -8,7 +8,7 @@ public class GetIndexMappingsAction extends ActionType{ - public static final String NAME = "cluster:admin/opendistro/securityanalytics/mapping/get"; + public static final String NAME = "cluster:admin/opensearch/securityanalytics/mapping/get"; public static final GetIndexMappingsAction INSTANCE = new GetIndexMappingsAction(); diff --git a/src/main/java/org/opensearch/securityanalytics/action/GetIndexMappingsRequest.java b/src/main/java/org/opensearch/securityanalytics/action/GetIndexMappingsRequest.java index 46c784e9a..089920350 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/GetIndexMappingsRequest.java +++ b/src/main/java/org/opensearch/securityanalytics/action/GetIndexMappingsRequest.java @@ -8,8 +8,8 @@ import org.opensearch.action.ActionRequestValidationException; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.XContentParser; import java.io.IOException; import java.util.Locale; diff --git a/src/main/java/org/opensearch/securityanalytics/action/GetIndexMappingsResponse.java b/src/main/java/org/opensearch/securityanalytics/action/GetIndexMappingsResponse.java index 59512d793..5ead9f1d4 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/GetIndexMappingsResponse.java +++ b/src/main/java/org/opensearch/securityanalytics/action/GetIndexMappingsResponse.java @@ -5,22 +5,28 @@ package org.opensearch.securityanalytics.action; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.opensearch.Version; import org.opensearch.action.ActionResponse; import org.opensearch.cluster.metadata.MappingMetadata; -import org.opensearch.common.ParseField; import org.opensearch.common.Strings; import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.ParseField; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.mapper.MapperService; import java.io.IOException; public class GetIndexMappingsResponse extends ActionResponse implements ToXContentObject { + private Logger logger = LogManager.getLogger(GetIndexMappingsResponse.class); + private static final ParseField MAPPINGS = new ParseField("mappings"); private final ImmutableOpenMap mappings; @@ -99,7 +105,12 @@ public ImmutableOpenMap getMappings() { @Override public String toString() { - return Strings.toString(this); + try { + return Strings.toString(this.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)); + } catch (IOException e) { + logger.error(e.getMessage()); + return ""; + } } @Override diff --git a/src/main/java/org/opensearch/securityanalytics/action/GetMappingsViewAction.java b/src/main/java/org/opensearch/securityanalytics/action/GetMappingsViewAction.java index 57d905df5..af032a93b 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/GetMappingsViewAction.java +++ b/src/main/java/org/opensearch/securityanalytics/action/GetMappingsViewAction.java @@ -8,7 +8,7 @@ public class GetMappingsViewAction extends ActionType{ - public static final String NAME = "cluster:admin/opendistro/securityanalytics/mapping/view/get"; + public static final String NAME = "cluster:admin/opensearch/securityanalytics/mapping/view/get"; public static final GetMappingsViewAction INSTANCE = new GetMappingsViewAction(); diff --git a/src/main/java/org/opensearch/securityanalytics/action/GetMappingsViewRequest.java b/src/main/java/org/opensearch/securityanalytics/action/GetMappingsViewRequest.java index e5f83a779..f784f8501 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/GetMappingsViewRequest.java +++ b/src/main/java/org/opensearch/securityanalytics/action/GetMappingsViewRequest.java @@ -11,8 +11,8 @@ import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.XContentParser; import static org.opensearch.action.ValidateActions.addValidationError; diff --git a/src/main/java/org/opensearch/securityanalytics/action/GetMappingsViewResponse.java b/src/main/java/org/opensearch/securityanalytics/action/GetMappingsViewResponse.java index 8502cba05..4af7cfb10 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/GetMappingsViewResponse.java +++ b/src/main/java/org/opensearch/securityanalytics/action/GetMappingsViewResponse.java @@ -4,30 +4,28 @@ */ package org.opensearch.securityanalytics.action; -import com.carrotsearch.hppc.cursors.ObjectObjectCursor; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import org.opensearch.Version; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.opensearch.action.ActionResponse; -import org.opensearch.cluster.metadata.MappingMetadata; -import org.opensearch.common.ParseField; import org.opensearch.common.Strings; -import org.opensearch.common.bytes.BytesReference; -import org.opensearch.common.collect.ImmutableOpenMap; -import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.index.mapper.MapperService; import org.opensearch.securityanalytics.mapper.MapperUtils; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; public class GetMappingsViewResponse extends ActionResponse implements ToXContentObject { + private Logger logger = LogManager.getLogger(GetMappingsViewResponse.class); + public static final String UNMAPPED_INDEX_FIELDS = "unmapped_index_fields"; public static final String UNMAPPED_FIELD_ALIASES = "unmapped_field_aliases"; @@ -133,7 +131,12 @@ public List getUnmappedIndexFields() { @Override public String toString() { - return Strings.toString(this); + try { + return Strings.toString(this.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)); + } catch (IOException e) { + logger.error(e.getMessage()); + return ""; + } } @Override diff --git a/src/main/java/org/opensearch/securityanalytics/action/IndexCorrelationRuleAction.java b/src/main/java/org/opensearch/securityanalytics/action/IndexCorrelationRuleAction.java new file mode 100644 index 000000000..48359d2de --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/action/IndexCorrelationRuleAction.java @@ -0,0 +1,21 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.securityanalytics.action; + +import org.opensearch.action.ActionType; + +public class IndexCorrelationRuleAction extends ActionType { + + public static final IndexCorrelationRuleAction INSTANCE = new IndexCorrelationRuleAction(); + public static final String NAME = "cluster:admin/index/correlation/rules/create"; + + private IndexCorrelationRuleAction() { + super(NAME, IndexCorrelationRuleResponse::new); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/action/IndexCorrelationRuleRequest.java b/src/main/java/org/opensearch/securityanalytics/action/IndexCorrelationRuleRequest.java new file mode 100644 index 000000000..ca1ed2595 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/action/IndexCorrelationRuleRequest.java @@ -0,0 +1,71 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.securityanalytics.action; + +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.rest.RestRequest; + +import java.io.IOException; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.opensearch.securityanalytics.model.CorrelationRule; + +public class IndexCorrelationRuleRequest extends ActionRequest { + + private String correlationRuleId; + + private CorrelationRule correlationRule; + + private RestRequest.Method method; + + private static final Pattern IS_VALID_RULE_NAME = Pattern.compile("[a-zA-Z0-9 _,-.]{5,50}"); + + public IndexCorrelationRuleRequest(String correlationRuleId, CorrelationRule correlationRule, RestRequest.Method method) { + super(); + this.correlationRuleId = correlationRuleId; + this.correlationRule = correlationRule; + this.method = method; + } + + public IndexCorrelationRuleRequest(StreamInput sin) throws IOException { + this(sin.readString(), CorrelationRule.readFrom(sin), sin.readEnum(RestRequest.Method.class)); + } + + @Override + public ActionRequestValidationException validate() { + Matcher matcher = IS_VALID_RULE_NAME.matcher(correlationRule.getName()); + boolean find = matcher.matches(); + if (!find) { + throw new ActionRequestValidationException(); + } + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(correlationRuleId); + correlationRule.writeTo(out); + } + + public String getCorrelationRuleId() { + return correlationRuleId; + } + + public CorrelationRule getCorrelationRule() { + return correlationRule; + } + + public RestRequest.Method getMethod() { + return method; + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/action/IndexCorrelationRuleResponse.java b/src/main/java/org/opensearch/securityanalytics/action/IndexCorrelationRuleResponse.java new file mode 100644 index 000000000..c72d4512a --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/action/IndexCorrelationRuleResponse.java @@ -0,0 +1,69 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.securityanalytics.action; + +import org.opensearch.action.ActionResponse; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.rest.RestStatus; + +import java.io.IOException; +import org.opensearch.securityanalytics.model.CorrelationRule; + +public class IndexCorrelationRuleResponse extends ActionResponse implements ToXContentObject { + + public static final String _ID = "_id"; + public static final String _VERSION = "_version"; + + private String id; + + private Long version; + + private RestStatus status; + + private CorrelationRule correlationRule; + + public IndexCorrelationRuleResponse(String id, Long version, RestStatus status, CorrelationRule correlationRule) { + super(); + this.id = id; + this.version = version; + this.status = status; + this.correlationRule = correlationRule; + } + + public IndexCorrelationRuleResponse(StreamInput sin) throws IOException { + this(sin.readString(), sin.readLong(), sin.readEnum(RestStatus.class), CorrelationRule.readFrom(sin)); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject().field(_ID, id).field(_VERSION, version); + + builder.field("rule", correlationRule); + return builder.endObject(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(id); + out.writeLong(version); + out.writeEnum(status); + correlationRule.writeTo(out); + } + + public String getId() { + return id; + } + + public RestStatus getStatus() { + return status; + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/action/IndexDetectorAction.java b/src/main/java/org/opensearch/securityanalytics/action/IndexDetectorAction.java index 0371ea158..18d2f219d 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/IndexDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/action/IndexDetectorAction.java @@ -9,7 +9,7 @@ public class IndexDetectorAction extends ActionType { public static final IndexDetectorAction INSTANCE = new IndexDetectorAction(); - public static final String NAME = "cluster:admin/opendistro/securityanalytics/detector/write"; + public static final String NAME = "cluster:admin/opensearch/securityanalytics/detector/write"; public IndexDetectorAction() { super(NAME, IndexDetectorResponse::new); diff --git a/src/main/java/org/opensearch/securityanalytics/action/IndexDetectorResponse.java b/src/main/java/org/opensearch/securityanalytics/action/IndexDetectorResponse.java index 78b0fb91f..6e5a128c7 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/IndexDetectorResponse.java +++ b/src/main/java/org/opensearch/securityanalytics/action/IndexDetectorResponse.java @@ -7,8 +7,8 @@ import org.opensearch.action.ActionResponse; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.rest.RestStatus; import org.opensearch.securityanalytics.model.Detector; diff --git a/src/main/java/org/opensearch/securityanalytics/action/IndexRuleAction.java b/src/main/java/org/opensearch/securityanalytics/action/IndexRuleAction.java index 4a1c8f708..5500c34d2 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/IndexRuleAction.java +++ b/src/main/java/org/opensearch/securityanalytics/action/IndexRuleAction.java @@ -9,7 +9,7 @@ public class IndexRuleAction extends ActionType { public static final IndexRuleAction INSTANCE = new IndexRuleAction(); - public static final String NAME = "cluster:admin/opendistro/securityanalytics/rule/write"; + public static final String NAME = "cluster:admin/opensearch/securityanalytics/rule/write"; public IndexRuleAction() { super(NAME, IndexRuleResponse::new); diff --git a/src/main/java/org/opensearch/securityanalytics/action/IndexRuleRequest.java b/src/main/java/org/opensearch/securityanalytics/action/IndexRuleRequest.java index a15324ee0..0702b7ac2 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/IndexRuleRequest.java +++ b/src/main/java/org/opensearch/securityanalytics/action/IndexRuleRequest.java @@ -4,6 +4,9 @@ */ package org.opensearch.securityanalytics.action; +import java.util.Arrays; +import java.util.Locale; +import java.util.Optional; import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.support.WriteRequest; @@ -12,6 +15,10 @@ import org.opensearch.rest.RestRequest; import java.io.IOException; +import org.opensearch.securityanalytics.model.Detector; + + +import static org.opensearch.action.ValidateActions.addValidationError; public class IndexRuleRequest extends ActionRequest { @@ -57,7 +64,7 @@ public IndexRuleRequest( super(); this.ruleId = ruleId; this.refreshPolicy = refreshPolicy; - this.logType = logType; + this.logType = logType.toLowerCase(Locale.ROOT); this.method = method; this.rule = rule; this.forced = forced; @@ -74,7 +81,20 @@ public IndexRuleRequest(StreamInput sin) throws IOException { @Override public ActionRequestValidationException validate() { - return null; + ActionRequestValidationException validationException = null; + + if (logType == null || logType.length() == 0) { + validationException = addValidationError("rule categoty is missing", validationException); + } else { + Optional found = + Arrays.stream(Detector.DetectorType.values()) + .filter(e -> e.getDetectorType().equals(logType)) + .findFirst(); + if (found.isPresent() == false) { + validationException = addValidationError("Invalid rule category", validationException); + } + } + return validationException; } @Override diff --git a/src/main/java/org/opensearch/securityanalytics/action/IndexRuleResponse.java b/src/main/java/org/opensearch/securityanalytics/action/IndexRuleResponse.java index b4308a200..02e612ac7 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/IndexRuleResponse.java +++ b/src/main/java/org/opensearch/securityanalytics/action/IndexRuleResponse.java @@ -7,8 +7,8 @@ import org.opensearch.action.ActionResponse; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.rest.RestStatus; import org.opensearch.securityanalytics.model.Detector; import org.opensearch.securityanalytics.model.Rule; diff --git a/src/main/java/org/opensearch/securityanalytics/action/ListCorrelationsAction.java b/src/main/java/org/opensearch/securityanalytics/action/ListCorrelationsAction.java new file mode 100644 index 000000000..7f87fce74 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/action/ListCorrelationsAction.java @@ -0,0 +1,16 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.action; + +import org.opensearch.action.ActionType; + +public class ListCorrelationsAction extends ActionType { + public static final ListCorrelationsAction INSTANCE = new ListCorrelationsAction(); + public static final String NAME = "cluster:admin/opensearch/securityanalytics/correlations/list"; + + public ListCorrelationsAction() { + super(NAME, ListCorrelationsResponse::new); + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/action/ListCorrelationsRequest.java b/src/main/java/org/opensearch/securityanalytics/action/ListCorrelationsRequest.java new file mode 100644 index 000000000..ff3fd225c --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/action/ListCorrelationsRequest.java @@ -0,0 +1,51 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.action; + +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +public class ListCorrelationsRequest extends ActionRequest { + + private Long startTimestamp; + + private Long endTimestamp; + + public ListCorrelationsRequest(Long startTimestamp, Long endTimestamp) { + super(); + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + } + + public ListCorrelationsRequest(StreamInput sin) throws IOException { + this( + sin.readLong(), + sin.readLong() + ); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeLong(startTimestamp); + out.writeLong(endTimestamp); + } + + public Long getStartTimestamp() { + return startTimestamp; + } + + public Long getEndTimestamp() { + return endTimestamp; + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/action/ListCorrelationsResponse.java b/src/main/java/org/opensearch/securityanalytics/action/ListCorrelationsResponse.java new file mode 100644 index 000000000..80eea0003 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/action/ListCorrelationsResponse.java @@ -0,0 +1,47 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.action; + +import org.opensearch.action.ActionResponse; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.securityanalytics.model.CorrelatedFinding; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; + +public class ListCorrelationsResponse extends ActionResponse implements ToXContentObject { + + private List correlatedFindings; + + protected static final String FINDINGS = "findings"; + + public ListCorrelationsResponse(List correlatedFindings) { + super(); + this.correlatedFindings = correlatedFindings; + } + + public ListCorrelationsResponse(StreamInput sin) throws IOException { + this( + Collections.unmodifiableList(sin.readList(CorrelatedFinding::new)) + ); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(correlatedFindings); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject() + .field(FINDINGS, correlatedFindings) + .endObject(); + return builder; + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/action/SearchCorrelationRuleAction.java b/src/main/java/org/opensearch/securityanalytics/action/SearchCorrelationRuleAction.java new file mode 100644 index 000000000..da775e4e2 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/action/SearchCorrelationRuleAction.java @@ -0,0 +1,18 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.action; + +import org.opensearch.action.ActionType; +import org.opensearch.action.search.SearchResponse; + +public class SearchCorrelationRuleAction extends ActionType { + + public static final SearchCorrelationRuleAction INSTANCE = new SearchCorrelationRuleAction(); + public static final String NAME = "cluster:admin/opensearch/securityanalytics/correlation/rule/search"; + + public SearchCorrelationRuleAction() { + super(NAME, SearchResponse::new); + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/action/SearchCorrelationRuleRequest.java b/src/main/java/org/opensearch/securityanalytics/action/SearchCorrelationRuleRequest.java new file mode 100644 index 000000000..dcf4fb955 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/action/SearchCorrelationRuleRequest.java @@ -0,0 +1,44 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.action; + +import java.io.IOException; +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; + +public class SearchCorrelationRuleRequest extends ActionRequest { + + /** + * this param decides whether search will be done on pre-packaged rules or custom rules. + */ + + private SearchRequest searchRequest; + + public SearchCorrelationRuleRequest(SearchRequest searchRequest) { + super(); + this.searchRequest = searchRequest; + } + + public SearchCorrelationRuleRequest(StreamInput sin) throws IOException { + this(new SearchRequest(sin)); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + searchRequest.writeTo(out); + } + + public SearchRequest getSearchRequest() { + return searchRequest; + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/action/SearchDetectorAction.java b/src/main/java/org/opensearch/securityanalytics/action/SearchDetectorAction.java index f33b3c84f..350ef1a32 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/SearchDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/action/SearchDetectorAction.java @@ -10,7 +10,7 @@ public class SearchDetectorAction extends ActionType { public static final SearchDetectorAction INSTANCE = new SearchDetectorAction(); - public static final String NAME = "cluster:admin/opendistro/securityanalytics/detector/search"; + public static final String NAME = "cluster:admin/opensearch/securityanalytics/detector/search"; public SearchDetectorAction() { super(NAME, SearchResponse::new); diff --git a/src/main/java/org/opensearch/securityanalytics/action/SearchRuleAction.java b/src/main/java/org/opensearch/securityanalytics/action/SearchRuleAction.java index 967a2e1b9..3c0cdc2e7 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/SearchRuleAction.java +++ b/src/main/java/org/opensearch/securityanalytics/action/SearchRuleAction.java @@ -10,7 +10,7 @@ public class SearchRuleAction extends ActionType { public static final SearchRuleAction INSTANCE = new SearchRuleAction(); - public static final String NAME = "cluster:admin/opendistro/securityanalytics/rule/search"; + public static final String NAME = "cluster:admin/opensearch/securityanalytics/rule/search"; public SearchRuleAction() { super(NAME, SearchResponse::new); diff --git a/src/main/java/org/opensearch/securityanalytics/action/UpdateIndexMappingsAction.java b/src/main/java/org/opensearch/securityanalytics/action/UpdateIndexMappingsAction.java index 71f502fd5..3af858b14 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/UpdateIndexMappingsAction.java +++ b/src/main/java/org/opensearch/securityanalytics/action/UpdateIndexMappingsAction.java @@ -9,7 +9,7 @@ public class UpdateIndexMappingsAction extends ActionType{ - public static final String NAME = "cluster:admin/opendistro/securityanalytics/mapping/update"; + public static final String NAME = "cluster:admin/opensearch/securityanalytics/mapping/update"; public static final UpdateIndexMappingsAction INSTANCE = new UpdateIndexMappingsAction(); diff --git a/src/main/java/org/opensearch/securityanalytics/action/UpdateIndexMappingsRequest.java b/src/main/java/org/opensearch/securityanalytics/action/UpdateIndexMappingsRequest.java index 36dc66f53..e9bef0c4d 100644 --- a/src/main/java/org/opensearch/securityanalytics/action/UpdateIndexMappingsRequest.java +++ b/src/main/java/org/opensearch/securityanalytics/action/UpdateIndexMappingsRequest.java @@ -8,10 +8,10 @@ import org.opensearch.action.ActionRequestValidationException; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; import java.io.IOException; import java.util.Locale; diff --git a/src/main/java/org/opensearch/securityanalytics/action/ValidateRulesAction.java b/src/main/java/org/opensearch/securityanalytics/action/ValidateRulesAction.java new file mode 100644 index 000000000..cde6ebac8 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/action/ValidateRulesAction.java @@ -0,0 +1,19 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.action; + +import org.opensearch.action.ActionType; +import org.opensearch.action.support.master.AcknowledgedResponse; + +public class ValidateRulesAction extends ActionType{ + + public static final String NAME = "cluster:admin/opendistro/securityanalytics/rules/validate"; + public static final ValidateRulesAction INSTANCE = new ValidateRulesAction(); + + + public ValidateRulesAction() { + super(NAME, ValidateRulesResponse::new); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/action/ValidateRulesRequest.java b/src/main/java/org/opensearch/securityanalytics/action/ValidateRulesRequest.java new file mode 100644 index 000000000..926c0a4e0 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/action/ValidateRulesRequest.java @@ -0,0 +1,136 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.action; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.common.Strings; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.common.xcontent.XContentParserUtils; + + +import static org.opensearch.action.ValidateActions.addValidationError; + +public class ValidateRulesRequest extends ActionRequest implements ToXContentObject { + + private static final Logger log = LogManager.getLogger(ValidateRulesRequest.class); + + public static final String INDEX_NAME_FIELD = "index_name"; + public static final String RULES_FIELD = "rules"; + + String indexName; + List rules; + + public ValidateRulesRequest(String indexName, List rules) { + super(); + this.indexName = indexName; + this.rules = rules; + } + + public ValidateRulesRequest(StreamInput sin) throws IOException { + this( + sin.readString(), + sin.readStringList() + ); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (indexName == null || indexName.length() == 0) { + validationException = addValidationError(String.format(Locale.getDefault(), "%s is missing", INDEX_NAME_FIELD), validationException); + } + if (rules == null || rules.size() == 0) { + validationException = addValidationError(String.format(Locale.getDefault(), "%s are missing", RULES_FIELD), validationException); + } + return validationException; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(indexName); + out.writeStringCollection(rules); + } + + public static ValidateRulesRequest parse(XContentParser xcp) throws IOException { + String indexName = null; + List ruleIds = null; + + if (xcp.currentToken() == null) { + xcp.nextToken(); + } + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp); + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + String fieldName = xcp.currentName(); + xcp.nextToken(); + + switch (fieldName) { + case INDEX_NAME_FIELD: + indexName = xcp.text(); + break; + case RULES_FIELD: + ruleIds = new ArrayList<>(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp); + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + ruleIds.add(xcp.text()); + } + break; + default: + xcp.skipChildren(); + } + } + return new ValidateRulesRequest(indexName, ruleIds); + } + + public ValidateRulesRequest indexName(String indexName) { + this.indexName = indexName; + return this; + } + + public ValidateRulesRequest rules(List rules) { + this.rules = rules; + return this; + } + + + + public String getIndexName() { + return this.indexName; + } + + public List getRules() { + return this.rules; + } + + public void setIndexName(String indexName) { + this.indexName = indexName; + } + + public List setRules(List rules) { + return this.rules = rules; + } + + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject() + .field(INDEX_NAME_FIELD, indexName) + .field(RULES_FIELD, rules) + .endObject(); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/action/ValidateRulesResponse.java b/src/main/java/org/opensearch/securityanalytics/action/ValidateRulesResponse.java new file mode 100644 index 000000000..b68878400 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/action/ValidateRulesResponse.java @@ -0,0 +1,99 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.action; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.action.ActionResponse; +import org.opensearch.common.Strings; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +public class ValidateRulesResponse extends ActionResponse implements ToXContentObject { + + private Logger logger = LogManager.getLogger(ValidateRulesResponse.class); + + public static final String NONAPPLICABLE_FIELDS = "nonapplicable_fields"; + + List nonapplicableFields; + + public ValidateRulesResponse(List nonapplicableFields) { + this.nonapplicableFields = nonapplicableFields; + } + + public ValidateRulesResponse(StreamInput in) throws IOException { + super(in); + nonapplicableFields = in.readStringList(); + int size = in.readVInt(); + if (size > 0) { + nonapplicableFields = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + nonapplicableFields.add(in.readString()); + } + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + if (nonapplicableFields != null) { + out.writeVInt(nonapplicableFields.size()); + for (String f : nonapplicableFields) { + out.writeString(f); + } + } else { + out.writeVInt(0); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (nonapplicableFields != null && nonapplicableFields.size() > 0) { + builder.field(NONAPPLICABLE_FIELDS, nonapplicableFields); + } + return builder.endObject(); + } + + public List getNonapplicableFields() { + return nonapplicableFields; + } + + @Override + public String toString() { + try { + return Strings.toString(this.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)); + } catch (IOException e) { + logger.error(e.getMessage()); + return ""; + } + } + + @Override + public int hashCode() { + return Objects.hash(new Object[]{this.nonapplicableFields}); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + + if (getClass() != obj.getClass()) { + return false; + } + ValidateRulesResponse other = (ValidateRulesResponse) obj; + return this.nonapplicableFields.equals(other.nonapplicableFields); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/alerts/AlertsService.java b/src/main/java/org/opensearch/securityanalytics/alerts/AlertsService.java index 2508786f0..d14a17344 100644 --- a/src/main/java/org/opensearch/securityanalytics/alerts/AlertsService.java +++ b/src/main/java/org/opensearch/securityanalytics/alerts/AlertsService.java @@ -6,6 +6,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.opensearch.OpenSearchStatusException; import org.opensearch.action.ActionListener; import org.opensearch.action.support.GroupedActionListener; import org.opensearch.action.support.WriteRequest; @@ -17,6 +18,7 @@ import org.opensearch.commons.alerting.action.GetAlertsRequest; import org.opensearch.commons.alerting.model.Alert; import org.opensearch.commons.alerting.model.Table; +import org.opensearch.rest.RestStatus; import org.opensearch.securityanalytics.action.AckAlertsResponse; import org.opensearch.securityanalytics.action.AlertDto; import org.opensearch.securityanalytics.action.GetAlertsResponse; @@ -55,6 +57,8 @@ public AlertsService(Client client) { * * @param detectorId id of Detector * @param table group of search related parameters + * @param severityLevel alert severity level + * @param alertState current alert state * @param listener ActionListener to get notified on response or error */ public void getAlertsByDetectorId( @@ -80,7 +84,7 @@ public void onResponse(GetDetectorResponse getDetectorResponse) { AlertsService.this.getAlertsByMonitorIds( monitorToDetectorMapping, monitorIds, - DetectorMonitorConfig.getAlertsIndex(detector.getDetectorType()), + DetectorMonitorConfig.getAllAlertsIndicesPattern(detector.getDetectorType()), table, severityLevel, alertState, @@ -102,7 +106,7 @@ public void onFailure(Exception e) { @Override public void onFailure(Exception e) { - listener.onFailure(SecurityAnalyticsException.wrap(e)); + listener.onFailure(e); } }); } @@ -110,8 +114,12 @@ public void onFailure(Exception e) { /** * Searches alerts generated by specific Monitor * - * @param monitorIds id of Monitor + * @param monitorToDetectorMapping monitorId to detectorId mapping + * @param monitorIds list of monitor ids + * @param alertIndex alert index to search alerts on * @param table group of search related parameters + * @param severityLevel alert severity level + * @param alertState current alert state * * @param listener ActionListener to get notified on response or error */ public void getAlertsByMonitorIds( @@ -172,7 +180,7 @@ public void getAlerts( ActionListener listener ) { if (detectors.size() == 0) { - throw SecurityAnalyticsException.wrap(new IllegalArgumentException("detector list is empty!")); + throw new OpenSearchStatusException("detector list is empty!", RestStatus.NOT_FOUND); } List allMonitorIds = new ArrayList<>(); @@ -191,7 +199,7 @@ public void getAlerts( AlertsService.this.getAlertsByMonitorIds( monitorToDetectorMapping, allMonitorIds, - DetectorMonitorConfig.getAlertsIndex(detectorType.getDetectorType()), + DetectorMonitorConfig.getAllAlertsIndicesPattern(detectorType.getDetectorType()), table, severityLevel, alertState, @@ -243,7 +251,7 @@ public void getAlerts(List alertIds, "ALL", "ALL", null, - DetectorMonitorConfig.getAlertsIndex(detector.getDetectorType()), + DetectorMonitorConfig.getAllAlertsIndicesPattern(detector.getDetectorType()), null, alertIds); AlertingPluginInterface.INSTANCE.getAlerts( @@ -253,9 +261,9 @@ public void getAlerts(List alertIds, } /** - * @param getAlertsResponse - * @param getDetectorResponse - * @param actionListener + * @param getAlertsResponse GetAlerts API response + * @param getDetectorResponse GetDetector API response + * @param actionListener Action Listener */ public void ackknowledgeAlerts(org.opensearch.commons.alerting.action.GetAlertsResponse getAlertsResponse, GetDetectorResponse getDetectorResponse, diff --git a/src/main/java/org/opensearch/securityanalytics/config/monitors/DetectorMonitorConfig.java b/src/main/java/org/opensearch/securityanalytics/config/monitors/DetectorMonitorConfig.java index a24750ae2..fc79c6454 100644 --- a/src/main/java/org/opensearch/securityanalytics/config/monitors/DetectorMonitorConfig.java +++ b/src/main/java/org/opensearch/securityanalytics/config/monitors/DetectorMonitorConfig.java @@ -4,6 +4,8 @@ */ package org.opensearch.securityanalytics.config.monitors; +import java.util.List; +import java.util.stream.Collectors; import org.opensearch.securityanalytics.model.Detector; import java.util.Arrays; @@ -13,71 +15,117 @@ public class DetectorMonitorConfig { + public static final String OPENSEARCH_DEFAULT_RULE_INDEX = ".opensearch-sap-detectors-queries-default"; public static final String OPENSEARCH_DEFAULT_ALERT_INDEX = ".opensearch-sap-alerts-default"; + public static final String OPENSEARCH_DEFAULT_ALL_ALERT_INDICES_PATTERN = ".opensearch-sap-alerts-default*"; public static final String OPENSEARCH_DEFAULT_ALERT_HISTORY_INDEX = ".opensearch-sap-alerts-history-default"; public static final String OPENSEARCH_DEFAULT_ALERT_HISTORY_INDEX_PATTERN = "<.opensearch-sap-alerts-history-default-{now/d}-1>"; public static final String OPENSEARCH_DEFAULT_FINDINGS_INDEX = ".opensearch-sap-findings-default"; + public static final String OPENSEARCH_DEFAULT_ALL_FINDINGS_INDICES_PATTERN = ".opensearch-sap-findings-default*"; public static final String OPENSEARCH_DEFAULT_FINDINGS_INDEX_PATTERN = "<.opensearch-sap-findings-default-{now/d}-1>"; - private static Map ruleIndexByDetectorTypeMap; + public static final String OPENSEARCH_SAP_RULE_INDEX_TEMPLATE = ".opensearch-sap-detectors-queries-index-template"; + + private static Map detectorTypeToIndicesMapping; static { - ruleIndexByDetectorTypeMap = new HashMap<>(); + detectorTypeToIndicesMapping = new HashMap<>(); Arrays.stream(Detector.DetectorType.values()).forEach( detectorType -> { String ruleIndex = String.format( - Locale.getDefault(), ".opensearch-sap-detectors-queries-%s", detectorType.getDetectorType()); + Locale.getDefault(), ".opensearch-sap-%s-detectors-queries", detectorType.getDetectorType()); String alertsIndex = String.format( - Locale.getDefault(), ".opensearch-sap-alerts-%s", detectorType.getDetectorType()); + Locale.getDefault(), ".opensearch-sap-%s-alerts", detectorType.getDetectorType()); String alertsHistoryIndex = String.format( - Locale.getDefault(), ".opensearch-sap-alerts-history-%s", detectorType.getDetectorType()); + Locale.getDefault(), ".opensearch-sap-%s-alerts-history", detectorType.getDetectorType()); String alertsHistoryIndexPattern = String.format( - Locale.getDefault(), "<.opensearch-sap-alerts-history-%s-{now/d}-1>", detectorType.getDetectorType()); + Locale.getDefault(), "<.opensearch-sap-%s-alerts-history-{now/d}-1>", detectorType.getDetectorType()); + String allAlertsIndicesPattern = String.format( + Locale.getDefault(), ".opensearch-sap-%s-alerts*", detectorType.getDetectorType()); String findingsIndex = String.format( - Locale.getDefault(), ".opensearch-sap-findings-%s", detectorType.getDetectorType()); + Locale.getDefault(), ".opensearch-sap-%s-findings", detectorType.getDetectorType()); + String allFindingsIndicesPattern = String.format( + Locale.getDefault(), ".opensearch-sap-%s-findings*", detectorType.getDetectorType()); String findingsIndexPattern = String.format( - Locale.getDefault(), "<.opensearch-sap-findings-%s-{now/d}-1>", detectorType.getDetectorType()); - - MonitorConfig monitor = new MonitorConfig(alertsIndex, alertsHistoryIndex, alertsHistoryIndexPattern, findingsIndex, findingsIndexPattern, ruleIndex); - ruleIndexByDetectorTypeMap.put(detectorType.getDetectorType(), monitor); + Locale.getDefault(), "<.opensearch-sap-%s-findings-{now/d}-1>", detectorType.getDetectorType()); + + MonitorConfig monitor = new MonitorConfig( + alertsIndex, alertsHistoryIndex, alertsHistoryIndexPattern, allAlertsIndicesPattern, + findingsIndex, findingsIndexPattern, allFindingsIndicesPattern, + ruleIndex + ); + detectorTypeToIndicesMapping.put(detectorType.getDetectorType(), monitor); }); } public static String getRuleIndex(String detectorType) { - return ruleIndexByDetectorTypeMap.containsKey(detectorType) ? - ruleIndexByDetectorTypeMap.get(detectorType).getRuleIndex() : + return detectorTypeToIndicesMapping.containsKey(detectorType.toLowerCase(Locale.ROOT)) ? + detectorTypeToIndicesMapping.get(detectorType.toLowerCase(Locale.ROOT)).getRuleIndex() : OPENSEARCH_DEFAULT_RULE_INDEX; } public static String getAlertsIndex(String detectorType) { - return ruleIndexByDetectorTypeMap.containsKey(detectorType) ? - ruleIndexByDetectorTypeMap.get(detectorType).getAlertsIndex() : + return detectorTypeToIndicesMapping.containsKey(detectorType.toLowerCase(Locale.ROOT)) ? + detectorTypeToIndicesMapping.get(detectorType.toLowerCase(Locale.ROOT)).getAlertsIndex() : OPENSEARCH_DEFAULT_ALERT_INDEX; } public static String getAlertsHistoryIndex(String detectorType) { - return ruleIndexByDetectorTypeMap.containsKey(detectorType) ? - ruleIndexByDetectorTypeMap.get(detectorType).getAlertsHistoryIndex() : + return detectorTypeToIndicesMapping.containsKey(detectorType.toLowerCase(Locale.ROOT)) ? + detectorTypeToIndicesMapping.get(detectorType.toLowerCase(Locale.ROOT)).getAlertsHistoryIndex() : OPENSEARCH_DEFAULT_ALERT_HISTORY_INDEX; } public static String getAlertsHistoryIndexPattern(String detectorType) { - return ruleIndexByDetectorTypeMap.containsKey(detectorType) ? - ruleIndexByDetectorTypeMap.get(detectorType).getAlertsHistoryIndexPattern() : + return detectorTypeToIndicesMapping.containsKey(detectorType.toLowerCase(Locale.ROOT)) ? + detectorTypeToIndicesMapping.get(detectorType.toLowerCase(Locale.ROOT)).getAlertsHistoryIndexPattern() : OPENSEARCH_DEFAULT_ALERT_HISTORY_INDEX_PATTERN; } + public static String getAllAlertsIndicesPattern(String detectorType) { + return detectorTypeToIndicesMapping.containsKey(detectorType.toLowerCase(Locale.ROOT)) ? + detectorTypeToIndicesMapping.get(detectorType.toLowerCase(Locale.ROOT)).getAllAlertsIndicesPattern() : + OPENSEARCH_DEFAULT_ALL_ALERT_INDICES_PATTERN; + } + + public static List getAllAlertsIndicesPatternForAllTypes() { + return detectorTypeToIndicesMapping.entrySet() + .stream() + .map(e -> e.getValue().getAllAlertsIndicesPattern()) + .collect(Collectors.toList()); + } + public static String getFindingsIndex(String detectorType) { - return ruleIndexByDetectorTypeMap.containsKey(detectorType) ? - ruleIndexByDetectorTypeMap.get(detectorType).getFindingsIndex() : + return detectorTypeToIndicesMapping.containsKey(detectorType.toLowerCase(Locale.ROOT)) ? + detectorTypeToIndicesMapping.get(detectorType.toLowerCase(Locale.ROOT)).getFindingsIndex() : OPENSEARCH_DEFAULT_FINDINGS_INDEX; } + public static String getAllFindingsIndicesPattern(String detectorType) { + return detectorTypeToIndicesMapping.containsKey(detectorType.toLowerCase(Locale.ROOT)) ? + detectorTypeToIndicesMapping.get(detectorType.toLowerCase(Locale.ROOT)).getAllFindingsIndicesPattern() : + OPENSEARCH_DEFAULT_ALL_FINDINGS_INDICES_PATTERN; + } + + public static List getAllFindingsIndicesPatternForAllTypes() { + return detectorTypeToIndicesMapping.entrySet() + .stream() + .map(e -> e.getValue().getAllFindingsIndicesPattern()) + .collect(Collectors.toList()); + } + + public static List getAllRuleIndices() { + return detectorTypeToIndicesMapping.entrySet() + .stream() + .map(e -> e.getValue().getRuleIndex()) + .collect(Collectors.toList()); + } + public static String getFindingsIndexPattern(String detectorType) { - return ruleIndexByDetectorTypeMap.containsKey(detectorType) ? - ruleIndexByDetectorTypeMap.get(detectorType).getFindingsIndexPattern() : - OPENSEARCH_DEFAULT_FINDINGS_INDEX; + return detectorTypeToIndicesMapping.containsKey(detectorType.toLowerCase(Locale.ROOT)) ? + detectorTypeToIndicesMapping.get(detectorType.toLowerCase(Locale.ROOT)).getFindingsIndexPattern() : + OPENSEARCH_DEFAULT_FINDINGS_INDEX_PATTERN; } public static Map> getRuleIndexMappingsByType(String detectorType) { @@ -88,27 +136,32 @@ public static Map> getRuleIndexMappingsByType(String return fieldMappingProperties; } - private static class MonitorConfig { + public static class MonitorConfig { private final String alertsIndex; private final String alertsHistoryIndex; private final String alertsHistoryIndexPattern; + private final String allAlertsIndicesPattern; private final String findingIndex; private final String findingsIndexPattern; + private final String allFindingsIndicesPattern; private final String ruleIndex; private MonitorConfig( String alertsIndex, String alertsHistoryIndex, String alertsHistoryIndexPattern, + String allAlertsIndicesPattern, String findingsIndex, String findingsIndexPattern, - String ruleIndex - ) { + String allFindingsIndicesPattern, + String ruleIndex) { this.alertsIndex = alertsIndex; this.alertsHistoryIndex = alertsHistoryIndex; this.alertsHistoryIndexPattern = alertsHistoryIndexPattern; + this.allAlertsIndicesPattern = allAlertsIndicesPattern; this.findingIndex = findingsIndex; this.findingsIndexPattern = findingsIndexPattern; + this.allFindingsIndicesPattern = allFindingsIndicesPattern; this.ruleIndex = ruleIndex; } @@ -124,6 +177,10 @@ public String getAlertsHistoryIndexPattern() { return alertsHistoryIndexPattern; } + public String getAllAlertsIndicesPattern() { + return allAlertsIndicesPattern; + } + public String getFindingsIndex() { return findingIndex; } @@ -132,6 +189,10 @@ public String getFindingsIndexPattern() { return findingsIndexPattern; } + public String getAllFindingsIndicesPattern() { + return allFindingsIndicesPattern; + } + public String getRuleIndex() { return ruleIndex; } diff --git a/src/main/java/org/opensearch/securityanalytics/correlation/CorrelationConstants.java b/src/main/java/org/opensearch/securityanalytics/correlation/CorrelationConstants.java new file mode 100644 index 000000000..cb4e5dfa9 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/correlation/CorrelationConstants.java @@ -0,0 +1,12 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.correlation; + +public class CorrelationConstants { + public static final String METHOD_PARAMETER_M = "m"; + public static final String METHOD_PARAMETER_EF_CONSTRUCTION = "ef_construction"; + public static final String DIMENSION = "dimension"; + public static final String CORRELATION_CONTEXT = "correlation_ctx"; +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/correlation/JoinEngine.java b/src/main/java/org/opensearch/securityanalytics/correlation/JoinEngine.java new file mode 100644 index 000000000..5e4bb6629 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/correlation/JoinEngine.java @@ -0,0 +1,430 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.correlation; + +import kotlin.Pair; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.search.join.ScoreMode; +import org.opensearch.OpenSearchStatusException; +import org.opensearch.action.ActionListener; +import org.opensearch.action.search.MultiSearchRequest; +import org.opensearch.action.search.MultiSearchResponse; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.client.Client; +import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.commons.alerting.action.PublishFindingsRequest; +import org.opensearch.commons.alerting.model.Finding; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.NestedQueryBuilder; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.index.query.RangeQueryBuilder; +import org.opensearch.rest.RestStatus; +import org.opensearch.search.SearchHit; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.securityanalytics.config.monitors.DetectorMonitorConfig; +import org.opensearch.securityanalytics.model.CorrelationQuery; +import org.opensearch.securityanalytics.model.CorrelationRule; +import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.transport.TransportCorrelateFindingAction; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + + +public class JoinEngine { + + private final Client client; + + private final PublishFindingsRequest request; + + private final NamedXContentRegistry xContentRegistry; + + private volatile long corrTimeWindow; + + private final TransportCorrelateFindingAction.AsyncCorrelateFindingAction correlateFindingAction; + + private static final Logger log = LogManager.getLogger(JoinEngine.class); + + public JoinEngine(Client client, PublishFindingsRequest request, NamedXContentRegistry xContentRegistry, + long corrTimeWindow, TransportCorrelateFindingAction.AsyncCorrelateFindingAction correlateFindingAction) { + this.client = client; + this.request = request; + this.xContentRegistry = xContentRegistry; + this.corrTimeWindow = corrTimeWindow; + this.correlateFindingAction = correlateFindingAction; + } + + public void onSearchDetectorResponse(Detector detector, Finding finding) { + String detectorType = detector.getDetectorType().toLowerCase(Locale.ROOT); + List indices = detector.getInputs().get(0).getIndices(); + List relatedDocIds = finding.getCorrelatedDocIds(); + + NestedQueryBuilder queryBuilder = QueryBuilders.nestedQuery( + "correlate", + QueryBuilders.matchQuery("correlate.category", detectorType), + ScoreMode.None + ); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.query(queryBuilder); + searchSourceBuilder.fetchSource(true); + + SearchRequest searchRequest = new SearchRequest(); + searchRequest.indices(CorrelationRule.CORRELATION_RULE_INDEX); + searchRequest.source(searchSourceBuilder); + + client.search(searchRequest, new ActionListener<>() { + @Override + public void onResponse(SearchResponse response) { + if (response.isTimedOut()) { + correlateFindingAction.onFailures(new OpenSearchStatusException(response.toString(), RestStatus.REQUEST_TIMEOUT)); + } + + Iterator hits = response.getHits().iterator(); + List correlationRules = new ArrayList<>(); + while (hits.hasNext()) { + try { + SearchHit hit = hits.next(); + + XContentParser xcp = XContentType.JSON.xContent().createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, hit.getSourceAsString() + ); + + CorrelationRule rule = CorrelationRule.parse(xcp, hit.getId(), hit.getVersion()); + correlationRules.add(rule); + } catch (IOException e) { + correlateFindingAction.onFailures(e); + } + } + + getValidDocuments(detectorType, indices, correlationRules, relatedDocIds); + } + + @Override + public void onFailure(Exception e) { + correlateFindingAction.onFailures(e); + } + }); + } + + /** + * this method checks if the finding to be correlated has valid related docs(or not) which match join criteria. + */ + private void getValidDocuments(String detectorType, List indices, List correlationRules, List relatedDocIds) { + MultiSearchRequest mSearchRequest = new MultiSearchRequest(); + List validCorrelationRules = new ArrayList<>(); + + for (CorrelationRule rule: correlationRules) { + Optional query = rule.getCorrelationQueries().stream() + .filter(correlationQuery -> correlationQuery.getCategory().equals(detectorType)).findFirst(); + + if (query.isPresent()) { + BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery() + .filter(QueryBuilders.termsQuery("_id", relatedDocIds)) + .must(QueryBuilders.queryStringQuery(query.get().getQuery())); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.query(queryBuilder); + searchSourceBuilder.fetchSource(false); + searchSourceBuilder.size(10000); + SearchRequest searchRequest = new SearchRequest(); + searchRequest.indices(indices.toArray(new String[]{})); + searchRequest.source(searchSourceBuilder); + + validCorrelationRules.add(rule); + mSearchRequest.add(searchRequest); + } + } + + if (!mSearchRequest.requests().isEmpty()) { + client.multiSearch(mSearchRequest, new ActionListener<>() { + @Override + public void onResponse(MultiSearchResponse items) { + MultiSearchResponse.Item[] responses = items.getResponses(); + List filteredCorrelationRules = new ArrayList<>(); + + int idx = 0; + for (MultiSearchResponse.Item response : responses) { + if (response.isFailure()) { + log.info(response.getFailureMessage()); + continue; + } + + if (response.getResponse().getHits().getTotalHits().value > 0L) { + filteredCorrelationRules.add(validCorrelationRules.get(idx)); + } + ++idx; + } + + Map> categoryToQueriesMap = new HashMap<>(); + for (CorrelationRule rule: filteredCorrelationRules) { + List queries = rule.getCorrelationQueries(); + + for (CorrelationQuery query: queries) { + List correlationQueries; + if (categoryToQueriesMap.containsKey(query.getCategory())) { + correlationQueries = categoryToQueriesMap.get(query.getCategory()); + } else { + correlationQueries = new ArrayList<>(); + } + correlationQueries.add(query); + categoryToQueriesMap.put(query.getCategory(), correlationQueries); + } + } + searchFindingsByTimestamp(detectorType, categoryToQueriesMap, + filteredCorrelationRules.stream().map(CorrelationRule::getId).collect(Collectors.toList())); + } + + @Override + public void onFailure(Exception e) { + correlateFindingAction.onFailures(e); + } + }); + } else { + correlateFindingAction.getTimestampFeature(detectorType, null, request.getFinding(), List.of()); + } + } + + /** + * this method searches for parent findings given the log category & correlation time window & collects all related docs + * for them. + */ + private void searchFindingsByTimestamp(String detectorType, Map> categoryToQueriesMap, List correlationRules) { + long findingTimestamp = request.getFinding().getTimestamp().toEpochMilli(); + MultiSearchRequest mSearchRequest = new MultiSearchRequest(); + List>> categoryToQueriesPairs = new ArrayList<>(); + + for (Map.Entry> categoryToQueries: categoryToQueriesMap.entrySet()) { + RangeQueryBuilder queryBuilder = QueryBuilders.rangeQuery("timestamp") + .gte(findingTimestamp - corrTimeWindow) + .lte(findingTimestamp + corrTimeWindow); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.query(queryBuilder); + searchSourceBuilder.fetchSource(false); + searchSourceBuilder.size(10000); + searchSourceBuilder.fetchField("correlated_doc_ids"); + SearchRequest searchRequest = new SearchRequest(); + searchRequest.indices(DetectorMonitorConfig.getAllFindingsIndicesPattern(categoryToQueries.getKey())); + searchRequest.source(searchSourceBuilder); + mSearchRequest.add(searchRequest); + categoryToQueriesPairs.add(new Pair<>(categoryToQueries.getKey(), categoryToQueries.getValue())); + } + + if (!mSearchRequest.requests().isEmpty()) { + client.multiSearch(mSearchRequest, new ActionListener<>() { + @Override + public void onResponse(MultiSearchResponse items) { + MultiSearchResponse.Item[] responses = items.getResponses(); + Map relatedDocsMap = new HashMap<>(); + + int idx = 0; + for (MultiSearchResponse.Item response : responses) { + if (response.isFailure()) { + log.info(response.getFailureMessage()); + continue; + } + + List relatedDocIds = new ArrayList<>(); + SearchHit[] hits = response.getResponse().getHits().getHits(); + for (SearchHit hit : hits) { + relatedDocIds.addAll(hit.getFields().get("correlated_doc_ids").getValues().stream() + .map(Object::toString).collect(Collectors.toList())); + } + + List correlationQueries = categoryToQueriesPairs.get(idx).getSecond(); + List indices = correlationQueries.stream().map(CorrelationQuery::getIndex).collect(Collectors.toList()); + List queries = correlationQueries.stream().map(CorrelationQuery::getQuery).collect(Collectors.toList()); + relatedDocsMap.put(categoryToQueriesPairs.get(idx).getFirst(), + new DocSearchCriteria( + indices, + queries, + relatedDocIds)); + ++idx; + } + searchDocsWithFilterKeys(detectorType, relatedDocsMap, correlationRules); + } + + @Override + public void onFailure(Exception e) { + correlateFindingAction.onFailures(e); + } + }); + } else { + correlateFindingAction.getTimestampFeature(detectorType, null, request.getFinding(), correlationRules); + } + } + + /** + * Given the related docs from parent findings, this method filters only those related docs which match parent join criteria. + */ + private void searchDocsWithFilterKeys(String detectorType, Map relatedDocsMap, List correlationRules) { + MultiSearchRequest mSearchRequest = new MultiSearchRequest(); + List categories = new ArrayList<>(); + + for (Map.Entry docSearchCriteria: relatedDocsMap.entrySet()) { + BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery() + .filter(QueryBuilders.termsQuery("_id", docSearchCriteria.getValue().relatedDocIds)); + + for (String query: docSearchCriteria.getValue().queries) { + queryBuilder = queryBuilder.should(QueryBuilders.queryStringQuery(query)); + } + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.query(queryBuilder); + searchSourceBuilder.fetchSource(false); + searchSourceBuilder.size(10000); + SearchRequest searchRequest = new SearchRequest(); + searchRequest.indices(docSearchCriteria.getValue().indices.toArray(new String[]{})); + searchRequest.source(searchSourceBuilder); + + categories.add(docSearchCriteria.getKey()); + mSearchRequest.add(searchRequest); + } + + if (!mSearchRequest.requests().isEmpty()) { + client.multiSearch(mSearchRequest, new ActionListener<>() { + @Override + public void onResponse(MultiSearchResponse items) { + MultiSearchResponse.Item[] responses = items.getResponses(); + Map> filteredRelatedDocIds = new HashMap<>(); + + int idx = 0; + for (MultiSearchResponse.Item response : responses) { + if (response.isFailure()) { + log.info(response.getFailureMessage()); + continue; + } + + SearchHit[] hits = response.getResponse().getHits().getHits(); + List docIds = new ArrayList<>(); + + for (SearchHit hit : hits) { + docIds.add(hit.getId()); + } + filteredRelatedDocIds.put(categories.get(idx), docIds); + ++idx; + } + getCorrelatedFindings(detectorType, filteredRelatedDocIds, correlationRules); + } + + @Override + public void onFailure(Exception e) { + correlateFindingAction.onFailures(e); + } + }); + } else { + correlateFindingAction.getTimestampFeature(detectorType, null, request.getFinding(), correlationRules); + } + } + + /** + * Given the filtered related docs of the parent findings, this method gets the actual filtered parent findings for + * the finding to be correlated. + */ + private void getCorrelatedFindings(String detectorType, Map> filteredRelatedDocIds, List correlationRules) { + long findingTimestamp = request.getFinding().getTimestamp().toEpochMilli(); + MultiSearchRequest mSearchRequest = new MultiSearchRequest(); + List categories = new ArrayList<>(); + + for (Map.Entry> relatedDocIds: filteredRelatedDocIds.entrySet()) { + BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery() + .filter(QueryBuilders.rangeQuery("timestamp") + .gte(findingTimestamp - corrTimeWindow) + .lte(findingTimestamp + corrTimeWindow)) + .must(QueryBuilders.termsQuery("correlated_doc_ids", relatedDocIds.getValue())); + + if (relatedDocIds.getKey().equals(detectorType)) { + queryBuilder = queryBuilder.mustNot(QueryBuilders.matchQuery("_id", request.getFinding().getId())); + } + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.query(queryBuilder); + searchSourceBuilder.fetchSource(false); + searchSourceBuilder.size(10000); + SearchRequest searchRequest = new SearchRequest(); + searchRequest.indices(DetectorMonitorConfig.getAllFindingsIndicesPattern(relatedDocIds.getKey())); + searchRequest.source(searchSourceBuilder); + + categories.add(relatedDocIds.getKey()); + mSearchRequest.add(searchRequest); + } + + if (!mSearchRequest.requests().isEmpty()) { + client.multiSearch(mSearchRequest, new ActionListener<>() { + @Override + public void onResponse(MultiSearchResponse items) { + MultiSearchResponse.Item[] responses = items.getResponses(); + Map> correlatedFindings = new HashMap<>(); + + int idx = 0; + for (MultiSearchResponse.Item response : responses) { + if (response.isFailure()) { + log.info(response.getFailureMessage()); + ++idx; + continue; + } + + SearchHit[] hits = response.getResponse().getHits().getHits(); + List findings = new ArrayList<>(); + + for (SearchHit hit : hits) { + findings.add(hit.getId()); + } + + if (!findings.isEmpty()) { + correlatedFindings.put(categories.get(idx), findings); + } + ++idx; + } + correlateFindingAction.initCorrelationIndex(detectorType, correlatedFindings, correlationRules); + } + + @Override + public void onFailure(Exception e) { + correlateFindingAction.onFailures(e); + } + }); + } else { + correlateFindingAction.getTimestampFeature(detectorType, null, request.getFinding(), correlationRules); + } + } + + static class DocSearchCriteria { + List indices; + List queries; + List relatedDocIds; + + public DocSearchCriteria(List indices, List queries, List relatedDocIds) { + this.indices = indices; + this.queries = queries; + this.relatedDocIds = relatedDocIds; + } + } + + static class ParentJoinCriteria { + String category; + String index; + String parentJoinQuery; + + public ParentJoinCriteria(String category, String index, String parentJoinQuery) { + this.category = category; + this.index = index; + this.parentJoinQuery = parentJoinQuery; + } + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/correlation/VectorEmbeddingsEngine.java b/src/main/java/org/opensearch/securityanalytics/correlation/VectorEmbeddingsEngine.java new file mode 100644 index 000000000..1e91835f6 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/correlation/VectorEmbeddingsEngine.java @@ -0,0 +1,568 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.correlation; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.OpenSearchStatusException; +import org.opensearch.action.ActionListener; +import org.opensearch.action.bulk.BulkRequest; +import org.opensearch.action.bulk.BulkResponse; +import org.opensearch.action.index.IndexRequest; +import org.opensearch.action.index.IndexResponse; +import org.opensearch.action.search.MultiSearchRequest; +import org.opensearch.action.search.MultiSearchResponse; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.support.WriteRequest; +import org.opensearch.client.Client; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.commons.alerting.model.Finding; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.MatchQueryBuilder; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.rest.RestStatus; +import org.opensearch.search.SearchHit; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.securityanalytics.correlation.index.query.CorrelationQueryBuilder; +import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.transport.TransportCorrelateFindingAction; +import org.opensearch.securityanalytics.util.CorrelationIndices; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +public class VectorEmbeddingsEngine { + + private final Client client; + + private final TransportCorrelateFindingAction.AsyncCorrelateFindingAction correlateFindingAction; + + private volatile TimeValue indexTimeout; + + private volatile long corrTimeWindow; + + private static final Logger log = LogManager.getLogger(VectorEmbeddingsEngine.class); + + public VectorEmbeddingsEngine(Client client, TimeValue indexTimeout, long corrTimeWindow, + TransportCorrelateFindingAction.AsyncCorrelateFindingAction correlateFindingAction) { + this.client = client; + this.indexTimeout = indexTimeout; + this.corrTimeWindow = corrTimeWindow; + this.correlateFindingAction = correlateFindingAction; + } + + public void insertCorrelatedFindings(String detectorType, Finding finding, String logType, List correlatedFindings, float timestampFeature, List correlationRules) { + long findingTimestamp = finding.getTimestamp().toEpochMilli(); + MatchQueryBuilder queryBuilder = QueryBuilders.matchQuery( + "root", true + ); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.query(queryBuilder); + searchSourceBuilder.fetchSource(true); + searchSourceBuilder.size(1); + SearchRequest searchRequest = new SearchRequest(); + searchRequest.indices(CorrelationIndices.CORRELATION_INDEX); + searchRequest.source(searchSourceBuilder); + + client.search(searchRequest, new ActionListener<>() { + @Override + public void onResponse(SearchResponse response) { + if (response.isTimedOut()) { + correlateFindingAction.onFailures(new OpenSearchStatusException(response.toString(), RestStatus.REQUEST_TIMEOUT)); + } + + Map hitSource = response.getHits().getHits()[0].getSourceAsMap(); + long counter = Long.parseLong(hitSource.get("counter").toString()); + + MultiSearchRequest mSearchRequest = new MultiSearchRequest(); + + for (String correlatedFinding: correlatedFindings) { + BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery() + .must(QueryBuilders.matchQuery( + "finding1", correlatedFinding + )).must(QueryBuilders.matchQuery( + "finding2", "" + ))/*.must(QueryBuilders.matchQuery( + "counter", counter + ))*/; + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.query(queryBuilder); + searchSourceBuilder.fetchSource(true); + searchSourceBuilder.size(10000); + SearchRequest searchRequest = new SearchRequest(); + searchRequest.indices(CorrelationIndices.CORRELATION_INDEX); + searchRequest.source(searchSourceBuilder); + + mSearchRequest.add(searchRequest); + } + + client.multiSearch(mSearchRequest, new ActionListener<>() { + @Override + public void onResponse(MultiSearchResponse items) { + MultiSearchResponse.Item[] responses = items.getResponses(); + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + long prevCounter = -1L; + long totalNeighbors = 0L; + for (MultiSearchResponse.Item response: responses) { + if (response.isFailure()) { + log.info(response.getFailureMessage()); + continue; + } + + long totalHits = response.getResponse().getHits().getTotalHits().value; + totalNeighbors += totalHits; + + for (int idx = 0; idx < totalHits; ++idx) { + SearchHit hit = response.getResponse().getHits().getHits()[idx]; + Map hitSource = hit.getSourceAsMap(); + long neighborCounter = Long.parseLong(hitSource.get("counter").toString()); + String correlatedFinding = hitSource.get("finding1").toString(); + + try { + float[] corrVector = new float[101]; + if (counter != prevCounter) { + for (int i = 0; i < 100; ++i) { + corrVector[i] = ((float) counter) - 50.0f; + } + corrVector[Detector.DetectorType.valueOf(detectorType.toUpperCase(Locale.ROOT)).getDim()] = (float) counter; + corrVector[100] = timestampFeature; + + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + builder.field("root", false); + builder.field("counter", counter); + builder.field("finding1", finding.getId()); + builder.field("finding2", ""); + builder.field("logType", Integer.valueOf(Detector.DetectorType.valueOf(detectorType.toUpperCase(Locale.ROOT)).getDim()).toString()); + builder.field("timestamp", findingTimestamp); + builder.field("corr_vector", corrVector); + builder.field("recordType", "finding"); + builder.field("scoreTimestamp", 0L); + builder.endObject(); + + IndexRequest indexRequest = new IndexRequest(CorrelationIndices.CORRELATION_INDEX) + .source(builder) + .timeout(indexTimeout); + bulkRequest.add(indexRequest); + } + + corrVector = new float[101]; + for (int i = 0; i < 100; ++i) { + corrVector[i] = ((float) counter) - 50.0f; + } + corrVector[Detector.DetectorType.valueOf(detectorType.toUpperCase(Locale.ROOT)).getDim()] = (2.0f * ((float) counter) - 50.0f) / 2.0f; + corrVector[Detector.DetectorType.valueOf(logType.toUpperCase(Locale.ROOT)).getDim()] = (2.0f * ((float) neighborCounter) - 50.0f) / 2.0f; + corrVector[100] = timestampFeature; + + XContentBuilder corrBuilder = XContentFactory.jsonBuilder().startObject(); + corrBuilder.field("root", false); + corrBuilder.field("counter", (long) ((2.0f * ((float) counter) - 50.0f) / 2.0f)); + corrBuilder.field("finding1", finding.getId()); + corrBuilder.field("finding2", correlatedFinding); + corrBuilder.field("logType", String.format(Locale.ROOT, "%s-%s", detectorType, logType)); + corrBuilder.field("timestamp", findingTimestamp); + corrBuilder.field("corr_vector", corrVector); + corrBuilder.field("recordType", "finding-finding"); + corrBuilder.field("scoreTimestamp", 0L); + corrBuilder.field("corrRules", correlationRules); + corrBuilder.endObject(); + + IndexRequest indexRequest = new IndexRequest(CorrelationIndices.CORRELATION_INDEX) + .source(corrBuilder) + .timeout(indexTimeout); + bulkRequest.add(indexRequest); + } catch (IOException ex) { + correlateFindingAction.onFailures(ex); + } + prevCounter = counter; + } + } + + if (totalNeighbors > 0L) { + client.bulk(bulkRequest, new ActionListener<>() { + @Override + public void onResponse(BulkResponse response) { + if (response.hasFailures()) { + correlateFindingAction.onFailures(new OpenSearchStatusException("Correlation of finding failed", RestStatus.INTERNAL_SERVER_ERROR)); + } + correlateFindingAction.onOperation(); + } + + @Override + public void onFailure(Exception e) { + correlateFindingAction.onFailures(e); + } + }); + } else { + insertOrphanFindings(detectorType, finding, timestampFeature); + } + } + + @Override + public void onFailure(Exception e) { + correlateFindingAction.onFailures(e); + } + }); + } + + @Override + public void onFailure(Exception e) { + correlateFindingAction.onFailures(e); + } + }); + } + + public void insertOrphanFindings(String detectorType, Finding finding, float timestampFeature) { + long findingTimestamp = finding.getTimestamp().toEpochMilli(); + MatchQueryBuilder queryBuilder = QueryBuilders.matchQuery( + "root", true + ); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.query(queryBuilder); + searchSourceBuilder.fetchSource(true); + searchSourceBuilder.size(1); + SearchRequest searchRequest = new SearchRequest(); + searchRequest.indices(CorrelationIndices.CORRELATION_INDEX); + searchRequest.source(searchSourceBuilder); + + client.search(searchRequest, new ActionListener<>() { + @Override + public void onResponse(SearchResponse response) { + if (response.isTimedOut()) { + correlateFindingAction.onFailures(new OpenSearchStatusException(response.toString(), RestStatus.REQUEST_TIMEOUT)); + } + + try { + Map hitSource = response.getHits().getHits()[0].getSourceAsMap(); + String id = response.getHits().getHits()[0].getId(); + long counter = Long.parseLong(hitSource.get("counter").toString()); + long timestamp = Long.parseLong(hitSource.get("timestamp").toString()); + if (counter == 0L) { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + builder.field("root", true); + builder.field("counter", 50L); + builder.field("finding1", ""); + builder.field("finding2", ""); + builder.field("logType", ""); + builder.field("timestamp", findingTimestamp); + builder.field("scoreTimestamp", 0L); + builder.endObject(); + + IndexRequest indexRequest = new IndexRequest(CorrelationIndices.CORRELATION_INDEX) + .id(id) + .source(builder) + .timeout(indexTimeout) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + client.index(indexRequest, new ActionListener<>() { + @Override + public void onResponse(IndexResponse response) { + if (response.status().equals(RestStatus.OK)) { + try { + float[] corrVector = new float[101]; + corrVector[Detector.DetectorType.valueOf(detectorType.toUpperCase(Locale.ROOT)).getDim()] = 50.0f; + corrVector[100] = timestampFeature; + + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + builder.field("root", false); + builder.field("counter", 50L); + builder.field("finding1", finding.getId()); + builder.field("finding2", ""); + builder.field("logType", Integer.valueOf(Detector.DetectorType.valueOf(detectorType.toUpperCase(Locale.ROOT)).getDim()).toString()); + builder.field("timestamp", findingTimestamp); + builder.field("corr_vector", corrVector); + builder.field("recordType", "finding"); + builder.field("scoreTimestamp", 0L); + builder.endObject(); + + IndexRequest indexRequest = new IndexRequest(CorrelationIndices.CORRELATION_INDEX) + .source(builder) + .timeout(indexTimeout) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + client.index(indexRequest, new ActionListener<>() { + @Override + public void onResponse(IndexResponse response) { + if (response.status().equals(RestStatus.CREATED)) { + correlateFindingAction.onOperation(); + } else { + correlateFindingAction.onFailures(new OpenSearchStatusException(response.toString(), RestStatus.INTERNAL_SERVER_ERROR)); + } + } + + @Override + public void onFailure(Exception e) { + correlateFindingAction.onFailures(e); + } + }); + } catch (IOException ex) { + correlateFindingAction.onFailures(ex); + } + } + } + + @Override + public void onFailure(Exception e) { + correlateFindingAction.onFailures(e); + } + }); + } else { + if (findingTimestamp - timestamp > corrTimeWindow) { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + builder.field("root", true); + builder.field("counter", 50L); + builder.field("finding1", ""); + builder.field("finding2", ""); + builder.field("logType", ""); + builder.field("timestamp", findingTimestamp); + builder.field("scoreTimestamp", 0L); + builder.endObject(); + + IndexRequest indexRequest = new IndexRequest(CorrelationIndices.CORRELATION_INDEX) + .id(id) + .source(builder) + .timeout(indexTimeout) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + client.index(indexRequest, new ActionListener<>() { + @Override + public void onResponse(IndexResponse response) { + if (response.status().equals(RestStatus.OK)) { + correlateFindingAction.onOperation(); + try { + float[] corrVector = new float[101]; + corrVector[Detector.DetectorType.valueOf(detectorType.toUpperCase(Locale.ROOT)).getDim()] = 50.0f; + corrVector[100] = timestampFeature; + + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + builder.field("root", false); + builder.field("counter", 50L); + builder.field("finding1", finding.getId()); + builder.field("finding2", ""); + builder.field("logType", Integer.valueOf(Detector.DetectorType.valueOf(detectorType.toUpperCase(Locale.ROOT)).getDim()).toString()); + builder.field("timestamp", findingTimestamp); + builder.field("corr_vector", corrVector); + builder.field("recordType", "finding"); + builder.field("scoreTimestamp", 0L); + builder.endObject(); + + IndexRequest indexRequest = new IndexRequest(CorrelationIndices.CORRELATION_INDEX) + .source(builder) + .timeout(indexTimeout) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + client.index(indexRequest, new ActionListener<>() { + @Override + public void onResponse(IndexResponse response) { + if (response.status().equals(RestStatus.CREATED)) { + correlateFindingAction.onOperation(); + } else { + correlateFindingAction.onFailures(new OpenSearchStatusException(response.toString(), RestStatus.INTERNAL_SERVER_ERROR)); + } + } + + @Override + public void onFailure(Exception e) { + correlateFindingAction.onFailures(e); + } + }); + } catch (IOException ex) { + correlateFindingAction.onFailures(ex); + } + } + } + + @Override + public void onFailure(Exception e) { + correlateFindingAction.onFailures(e); + } + }); + } else { + float[] query = new float[101]; + for (int i = 0; i < 100; ++i) { + query[i] = (2.0f * ((float) counter) - 50.0f) / 2.0f; + } + query[100] = timestampFeature; + + CorrelationQueryBuilder correlationQueryBuilder = new CorrelationQueryBuilder("corr_vector", query, 100, QueryBuilders.boolQuery() + .mustNot(QueryBuilders.matchQuery( + "finding1", "" + )).mustNot(QueryBuilders.matchQuery( + "finding2", "" + )).filter(QueryBuilders.rangeQuery("timestamp") + .gte(findingTimestamp - corrTimeWindow) + .lte(findingTimestamp + corrTimeWindow))); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.query(correlationQueryBuilder); + searchSourceBuilder.fetchSource(true); + searchSourceBuilder.size(1); + SearchRequest searchRequest = new SearchRequest(); + searchRequest.indices(CorrelationIndices.CORRELATION_INDEX); + searchRequest.source(searchSourceBuilder); + + client.search(searchRequest, new ActionListener<>() { + @Override + public void onResponse(SearchResponse response) { + if (response.isTimedOut()) { + correlateFindingAction.onFailures(new OpenSearchStatusException(response.toString(), RestStatus.REQUEST_TIMEOUT)); + } + + long totalHits = response.getHits().getTotalHits().value; + SearchHit hit = totalHits > 0? response.getHits().getHits()[0]: null; + long existCounter = 0L; + + if (hit != null) { + Map hitSource = response.getHits().getHits()[0].getSourceAsMap(); + existCounter = Long.parseLong(hitSource.get("counter").toString()); + } + + if (totalHits == 0L || existCounter != ((long) (2.0f * ((float) counter) - 50.0f) / 2.0f)) { + try { + float[] corrVector = new float[101]; + for (int i = 0; i < 100; ++i) { + corrVector[i] = ((float) counter) - 50.0f; + } + corrVector[Detector.DetectorType.valueOf(detectorType.toUpperCase(Locale.ROOT)).getDim()] = (float) counter; + corrVector[100] = timestampFeature; + + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + builder.field("root", false); + builder.field("counter", counter); + builder.field("finding1", finding.getId()); + builder.field("finding2", ""); + builder.field("logType", Integer.valueOf(Detector.DetectorType.valueOf(detectorType.toUpperCase(Locale.ROOT)).getDim()).toString()); + builder.field("timestamp", findingTimestamp); + builder.field("corr_vector", corrVector); + builder.field("recordType", "finding"); + builder.field("scoreTimestamp", 0L); + builder.endObject(); + + IndexRequest indexRequest = new IndexRequest(CorrelationIndices.CORRELATION_INDEX) + .source(builder) + .timeout(indexTimeout) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + client.index(indexRequest, new ActionListener<>() { + @Override + public void onResponse(IndexResponse response) { + if (response.status().equals(RestStatus.CREATED)) { + correlateFindingAction.onOperation(); + } else { + correlateFindingAction.onFailures(new OpenSearchStatusException(response.toString(), RestStatus.INTERNAL_SERVER_ERROR)); + } + } + + @Override + public void onFailure(Exception e) { + correlateFindingAction.onFailures(e); + } + }); + } catch (IOException ex) { + correlateFindingAction.onFailures(ex); + } + } else { + try { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + builder.field("root", true); + builder.field("counter", counter + 50L); + builder.field("finding1", ""); + builder.field("finding2", ""); + builder.field("logType", ""); + builder.field("timestamp", findingTimestamp); + builder.field("scoreTimestamp", 0L); + builder.endObject(); + + IndexRequest indexRequest = new IndexRequest(CorrelationIndices.CORRELATION_INDEX) + .id(id) + .source(builder) + .timeout(indexTimeout) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + client.index(indexRequest, new ActionListener<>() { + @Override + public void onResponse(IndexResponse response) { + if (response.status().equals(RestStatus.OK)) { + try { + float[] corrVector = new float[101]; + for (int i = 0; i < 100; ++i) { + corrVector[i] = (float) counter; + } + corrVector[Detector.DetectorType.valueOf(detectorType.toUpperCase(Locale.ROOT)).getDim()] = counter + 50.0f; + corrVector[100] = timestampFeature; + + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + builder.field("root", false); + builder.field("counter", counter + 50L); + builder.field("finding1", finding.getId()); + builder.field("finding2", ""); + builder.field("logType", Integer.valueOf(Detector.DetectorType.valueOf(detectorType.toUpperCase(Locale.ROOT)).getDim()).toString()); + builder.field("timestamp", findingTimestamp); + builder.field("corr_vector", corrVector); + builder.field("recordType", "finding"); + builder.field("scoreTimestamp", 0L); + builder.endObject(); + + IndexRequest indexRequest = new IndexRequest(CorrelationIndices.CORRELATION_INDEX) + .source(builder) + .timeout(indexTimeout) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + client.index(indexRequest, new ActionListener<>() { + @Override + public void onResponse(IndexResponse response) { + if (response.status().equals(RestStatus.CREATED)) { + correlateFindingAction.onOperation(); + } else { + correlateFindingAction.onFailures(new OpenSearchStatusException(response.toString(), RestStatus.INTERNAL_SERVER_ERROR)); + } + } + + @Override + public void onFailure(Exception e) { + correlateFindingAction.onFailures(e); + } + }); + } catch (IOException ex) { + correlateFindingAction.onFailures(ex); + } + } + } + + @Override + public void onFailure(Exception e) { + correlateFindingAction.onFailures(e); + } + }); + } catch (IOException ex) { + correlateFindingAction.onFailures(ex); + } + } + } + + @Override + public void onFailure(Exception e) { + correlateFindingAction.onFailures(e); + } + }); + } + } + } catch (IOException ex) { + correlateFindingAction.onFailures(ex); + } + } + + @Override + public void onFailure(Exception e) { + correlateFindingAction.onFailures(e); + } + }); + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/correlation/index/CorrelationParamsContext.java b/src/main/java/org/opensearch/securityanalytics/correlation/index/CorrelationParamsContext.java new file mode 100644 index 000000000..1dbfba528 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/correlation/index/CorrelationParamsContext.java @@ -0,0 +1,133 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.correlation.index; + +import org.apache.lucene.index.VectorSimilarityFunction; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.io.stream.Writeable; +import org.opensearch.core.xcontent.ToXContentFragment; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.index.mapper.MapperParsingException; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; + +public class CorrelationParamsContext implements ToXContentFragment, Writeable { + + public static final String VECTOR_SIMILARITY_FUNCTION = "similarityFunction"; + public static final String PARAMETERS = "parameters"; + + private final VectorSimilarityFunction similarityFunction; + private final Map parameters; + + public CorrelationParamsContext(VectorSimilarityFunction similarityFunction, Map parameters) { + this.similarityFunction = similarityFunction; + this.parameters = parameters; + } + + public CorrelationParamsContext(StreamInput sin) throws IOException { + this.similarityFunction = VectorSimilarityFunction.valueOf(sin.readString()); + if (sin.available() > 0) { + this.parameters = sin.readMap(); + } else { + this.parameters = null; + } + } + + public static CorrelationParamsContext parse(Object in) { + if (!(in instanceof Map)) { + throw new MapperParsingException("Unable to parse CorrelationParamsContext"); + } + + @SuppressWarnings("unchecked") + Map contextMap = (Map) in; + VectorSimilarityFunction similarityFunction = VectorSimilarityFunction.EUCLIDEAN; + Map parameters = new HashMap<>(); + + for (Map.Entry contextEntry: contextMap.entrySet()) { + String key = contextEntry.getKey(); + Object value = contextEntry.getValue(); + + if (VECTOR_SIMILARITY_FUNCTION.equals(key)) { + if (value != null && !(value instanceof String)) { + throw new MapperParsingException(String.format(Locale.getDefault(), "%s must be a string", VECTOR_SIMILARITY_FUNCTION)); + } + + try { + similarityFunction = VectorSimilarityFunction.valueOf((String) value); + } catch (IllegalArgumentException ex) { + throw new MapperParsingException(String.format(Locale.getDefault(), "Invalid %s: %s", VECTOR_SIMILARITY_FUNCTION, value)); + } + } else if (PARAMETERS.equals(key)) { + if (value == null) { + parameters = null; + continue; + } + + if (!(value instanceof Map)) { + throw new MapperParsingException("Unable to parse parameters for Correlation context"); + } + + @SuppressWarnings("unchecked") + Map valueMap = (Map) value; + assert parameters != null; + parameters.putAll(valueMap); + } else { + throw new MapperParsingException(String.format(Locale.getDefault(), "Invalid parameter for : %s", key)); + } + } + return new CorrelationParamsContext(similarityFunction, parameters); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(VECTOR_SIMILARITY_FUNCTION, similarityFunction.name()); + if (params == null) { + builder.field(PARAMETERS, (String) null); + } else { + builder.startObject(PARAMETERS); + for (Map.Entry parameter: parameters.entrySet()) { + builder.field(parameter.getKey(), parameter.getValue()); + } + builder.endObject(); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CorrelationParamsContext that = (CorrelationParamsContext) o; + return similarityFunction == that.similarityFunction && parameters.equals(that.parameters); + } + + @Override + public int hashCode() { + return Objects.hash(similarityFunction, parameters); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(similarityFunction.name()); + if (this.parameters != null) { + out.writeMap(parameters); + } + } + + public VectorSimilarityFunction getSimilarityFunction() { + return similarityFunction; + } + + public Map getParameters() { + return parameters; + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/correlation/index/VectorField.java b/src/main/java/org/opensearch/securityanalytics/correlation/index/VectorField.java new file mode 100644 index 000000000..bd4c2b619 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/correlation/index/VectorField.java @@ -0,0 +1,25 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.correlation.index; + +import org.apache.lucene.document.Field; +import org.apache.lucene.index.IndexableFieldType; +import org.apache.lucene.util.BytesRef; +import org.opensearch.securityanalytics.correlation.index.codec.util.CorrelationVectorAsArraySerializer; +import org.opensearch.securityanalytics.correlation.index.codec.util.CorrelationVectorSerializer; + +public class VectorField extends Field { + + public VectorField(String name, float[] value, IndexableFieldType type) { + super(name, new BytesRef(), type); + try { + final CorrelationVectorSerializer vectorSerializer = new CorrelationVectorAsArraySerializer(); + final byte[] floatToByte = vectorSerializer.floatToByteArray(value); + this.setBytesValue(floatToByte); + } catch (Exception ex) { + throw new RuntimeException(ex); + } + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/BasePerFieldCorrelationVectorsFormat.java b/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/BasePerFieldCorrelationVectorsFormat.java new file mode 100644 index 000000000..0508c97bf --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/BasePerFieldCorrelationVectorsFormat.java @@ -0,0 +1,74 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.correlation.index.codec; + +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; +import org.opensearch.index.mapper.MapperService; +import org.opensearch.securityanalytics.correlation.CorrelationConstants; +import org.opensearch.securityanalytics.correlation.index.mapper.CorrelationVectorFieldMapper; + +import java.util.Locale; +import java.util.Map; +import java.util.Optional; +import java.util.function.BiFunction; +import java.util.function.Supplier; + +public abstract class BasePerFieldCorrelationVectorsFormat extends PerFieldKnnVectorsFormat { + + private final Optional mapperService; + private final int defaultMaxConnections; + private final int defaultBeamWidth; + private final Supplier defaultFormatSupplier; + private final BiFunction formatSupplier; + + public BasePerFieldCorrelationVectorsFormat(Optional mapperService, + int defaultMaxConnections, + int defaultBeamWidth, + Supplier defaultFormatSupplier, + BiFunction formatSupplier) { + this.mapperService = mapperService; + this.defaultMaxConnections = defaultMaxConnections; + this.defaultBeamWidth = defaultBeamWidth; + this.defaultFormatSupplier = defaultFormatSupplier; + this.formatSupplier = formatSupplier; + } + + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + if (!isCorrelationVectorFieldType(field)) { + return defaultFormatSupplier.get(); + } + + var type = (CorrelationVectorFieldMapper.CorrelationVectorFieldType) mapperService.orElseThrow( + () -> new IllegalArgumentException(String.format(Locale.getDefault(), + "Cannot read field type for field [%s] because mapper service is not available", field))) + .fieldType(field); + + var params = type.getCorrelationParams().getParameters(); + int maxConnections = getMaxConnections(params); + int beamWidth = getBeamWidth(params); + + return formatSupplier.apply(maxConnections, beamWidth); + } + + private boolean isCorrelationVectorFieldType(final String field) { + return mapperService.isPresent() && mapperService.get().fieldType(field) instanceof CorrelationVectorFieldMapper.CorrelationVectorFieldType; + } + + private int getMaxConnections(final Map params) { + if (params != null && params.containsKey(CorrelationConstants.METHOD_PARAMETER_M)) { + return (int) params.get(CorrelationConstants.METHOD_PARAMETER_M); + } + return defaultMaxConnections; + } + + private int getBeamWidth(final Map params) { + if (params != null && params.containsKey(CorrelationConstants.METHOD_PARAMETER_EF_CONSTRUCTION)) { + return (int) params.get(CorrelationConstants.METHOD_PARAMETER_EF_CONSTRUCTION); + } + return defaultBeamWidth; + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/CorrelationCodecService.java b/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/CorrelationCodecService.java new file mode 100644 index 000000000..9b29dd106 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/CorrelationCodecService.java @@ -0,0 +1,25 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.correlation.index.codec; + +import org.apache.lucene.codecs.Codec; +import org.opensearch.index.codec.CodecService; +import org.opensearch.index.codec.CodecServiceConfig; +import org.opensearch.index.mapper.MapperService; + +public class CorrelationCodecService extends CodecService { + + private final MapperService mapperService; + + public CorrelationCodecService(CodecServiceConfig codecServiceConfig) { + super(codecServiceConfig.getMapperService(), codecServiceConfig.getLogger()); + mapperService = codecServiceConfig.getMapperService(); + } + + @Override + public Codec codec(String name) { + return CorrelationCodecVersion.current().getCorrelationCodecSupplier().apply(super.codec(name), mapperService); + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/CorrelationCodecVersion.java b/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/CorrelationCodecVersion.java new file mode 100644 index 000000000..2e6091572 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/CorrelationCodecVersion.java @@ -0,0 +1,68 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.correlation.index.codec; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.opensearch.index.mapper.MapperService; +import org.opensearch.securityanalytics.correlation.index.codec.correlation950.CorrelationCodec; +import org.opensearch.securityanalytics.correlation.index.codec.correlation950.PerFieldCorrelationVectorsFormat; + +import java.util.Optional; +import java.util.function.BiFunction; +import java.util.function.Supplier; + +public enum CorrelationCodecVersion { + V_9_5_0( + "CorrelationCodec", + new Lucene95Codec(), + new PerFieldCorrelationVectorsFormat(Optional.empty()), + (userCodec, mapperService) -> new CorrelationCodec(userCodec, new PerFieldCorrelationVectorsFormat(Optional.of(mapperService))), + CorrelationCodec::new + ); + + private static final CorrelationCodecVersion CURRENT = V_9_5_0; + private final String codecName; + private final Codec defaultCodecDelegate; + private final PerFieldCorrelationVectorsFormat perFieldCorrelationVectorsFormat; + private final BiFunction correlationCodecSupplier; + private final Supplier defaultCorrelationCodecSupplier; + + CorrelationCodecVersion(String codecName, + Codec defaultCodecDelegate, + PerFieldCorrelationVectorsFormat perFieldCorrelationVectorsFormat, + BiFunction correlationCodecSupplier, + Supplier defaultCorrelationCodecSupplier) { + this.codecName = codecName; + this.defaultCodecDelegate = defaultCodecDelegate; + this.perFieldCorrelationVectorsFormat = perFieldCorrelationVectorsFormat; + this.correlationCodecSupplier = correlationCodecSupplier; + this.defaultCorrelationCodecSupplier = defaultCorrelationCodecSupplier; + } + + public String getCodecName() { + return codecName; + } + + public Codec getDefaultCodecDelegate() { + return defaultCodecDelegate; + } + + public PerFieldCorrelationVectorsFormat getPerFieldCorrelationVectorsFormat() { + return perFieldCorrelationVectorsFormat; + } + + public BiFunction getCorrelationCodecSupplier() { + return correlationCodecSupplier; + } + + public Supplier getDefaultCorrelationCodecSupplier() { + return defaultCorrelationCodecSupplier; + } + + public static final CorrelationCodecVersion current() { + return CURRENT; + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/correlation950/CorrelationCodec.java b/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/correlation950/CorrelationCodec.java new file mode 100644 index 000000000..87f0ce8f1 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/correlation950/CorrelationCodec.java @@ -0,0 +1,29 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.correlation.index.codec.correlation950; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.codecs.FilterCodec; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.opensearch.securityanalytics.correlation.index.codec.CorrelationCodecVersion; + +public class CorrelationCodec extends FilterCodec { + private static final CorrelationCodecVersion VERSION = CorrelationCodecVersion.V_9_5_0; + private final PerFieldCorrelationVectorsFormat perFieldCorrelationVectorsFormat; + + public CorrelationCodec() { + this(VERSION.getDefaultCodecDelegate(), VERSION.getPerFieldCorrelationVectorsFormat()); + } + + public CorrelationCodec(Codec delegate, PerFieldCorrelationVectorsFormat perFieldCorrelationVectorsFormat) { + super(VERSION.getCodecName(), delegate); + this.perFieldCorrelationVectorsFormat = perFieldCorrelationVectorsFormat; + } + + @Override + public KnnVectorsFormat knnVectorsFormat() { + return perFieldCorrelationVectorsFormat; + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/correlation950/PerFieldCorrelationVectorsFormat.java b/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/correlation950/PerFieldCorrelationVectorsFormat.java new file mode 100644 index 000000000..239e66ade --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/correlation950/PerFieldCorrelationVectorsFormat.java @@ -0,0 +1,24 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.correlation.index.codec.correlation950; + +import org.apache.lucene.codecs.lucene95.Lucene95HnswVectorsFormat; +import org.opensearch.index.mapper.MapperService; +import org.opensearch.securityanalytics.correlation.index.codec.BasePerFieldCorrelationVectorsFormat; + +import java.util.Optional; + +public class PerFieldCorrelationVectorsFormat extends BasePerFieldCorrelationVectorsFormat { + + public PerFieldCorrelationVectorsFormat(final Optional mapperService) { + super( + mapperService, + Lucene95HnswVectorsFormat.DEFAULT_MAX_CONN, + Lucene95HnswVectorsFormat.DEFAULT_BEAM_WIDTH, + () -> new Lucene95HnswVectorsFormat(), + (maxConn, beamWidth) -> new Lucene95HnswVectorsFormat(maxConn, beamWidth) + ); + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/util/CorrelationVectorAsArraySerializer.java b/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/util/CorrelationVectorAsArraySerializer.java new file mode 100644 index 000000000..c03ceb6f4 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/util/CorrelationVectorAsArraySerializer.java @@ -0,0 +1,41 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.correlation.index.codec.util; + +import org.opensearch.ExceptionsHelper; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; + +public class CorrelationVectorAsArraySerializer implements CorrelationVectorSerializer { + + @Override + public byte[] floatToByteArray(float[] input) { + byte[] bytes; + try( + ByteArrayOutputStream byteStream = new ByteArrayOutputStream(); + ObjectOutputStream objectStream = new ObjectOutputStream(byteStream); + ) { + objectStream.writeObject(input); + bytes = byteStream.toByteArray(); + } catch (IOException ex) { + throw ExceptionsHelper.convertToOpenSearchException(ex); + } + return bytes; + } + + @Override + public float[] byteToFloatArray(ByteArrayInputStream byteStream) { + try { + ObjectInputStream objectStream = new ObjectInputStream(byteStream); + return (float[]) objectStream.readObject(); + } catch (IOException | ClassNotFoundException ex) { + throw ExceptionsHelper.convertToOpenSearchException(ex); + } + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/util/CorrelationVectorSerializer.java b/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/util/CorrelationVectorSerializer.java new file mode 100644 index 000000000..9b474f3ee --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/correlation/index/codec/util/CorrelationVectorSerializer.java @@ -0,0 +1,14 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.correlation.index.codec.util; + +import java.io.ByteArrayInputStream; + +public interface CorrelationVectorSerializer { + + byte[] floatToByteArray(float[] input); + + float[] byteToFloatArray(ByteArrayInputStream byteStream); +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/correlation/index/mapper/CorrelationVectorFieldMapper.java b/src/main/java/org/opensearch/securityanalytics/correlation/index/mapper/CorrelationVectorFieldMapper.java new file mode 100644 index 000000000..ed269fb80 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/correlation/index/mapper/CorrelationVectorFieldMapper.java @@ -0,0 +1,310 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.correlation.index.mapper; + +import org.apache.lucene.search.FieldExistsQuery; +import org.apache.lucene.search.Query; +import org.opensearch.common.Explicit; +import org.opensearch.common.xcontent.support.XContentMapValues; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.index.mapper.FieldMapper; +import org.opensearch.index.mapper.MappedFieldType; +import org.opensearch.index.mapper.Mapper; +import org.opensearch.index.mapper.MapperParsingException; +import org.opensearch.index.mapper.ParametrizedFieldMapper; +import org.opensearch.index.mapper.ParseContext; +import org.opensearch.index.mapper.TextSearchInfo; +import org.opensearch.index.mapper.ValueFetcher; +import org.opensearch.index.query.QueryShardContext; +import org.opensearch.index.query.QueryShardException; +import org.opensearch.search.lookup.SearchLookup; +import org.opensearch.securityanalytics.correlation.CorrelationConstants; +import org.opensearch.securityanalytics.correlation.index.CorrelationParamsContext; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; + +public abstract class CorrelationVectorFieldMapper extends ParametrizedFieldMapper { + + public static final String CONTENT_TYPE = "sa_vector"; + + private static CorrelationVectorFieldMapper toType(FieldMapper in) { + return (CorrelationVectorFieldMapper) in; + } + + public static class Builder extends ParametrizedFieldMapper.Builder { + protected Boolean ignoreMalformed; + + protected final Parameter stored = Parameter.boolParam("store", false, m -> toType(m).stored, false); + protected final Parameter hasDocValues = Parameter.boolParam("doc_values", false, m -> toType(m).hasDocValues, true); + protected final Parameter dimension = new Parameter<>(CorrelationConstants.DIMENSION, false, () -> -1, (n, c, o) -> { + if (o == null) { + throw new IllegalArgumentException("Dimension cannot be null"); + } + int value; + try { + value = XContentMapValues.nodeIntegerValue(o); + } catch (Exception ex) { + throw new IllegalArgumentException(String.format(Locale.getDefault(), + "Unable to parse [dimension] from provided value [%s] for vector [%s]", o, name)); + } + if (value <= 0) { + throw new IllegalArgumentException(String.format(Locale.getDefault(), "Dimension value must be greater than 0 for vector: %s", name)); + } + return value; + }, m -> toType(m).dimension); + + protected final Parameter correlationParamsContext = new Parameter<>( + CorrelationConstants.CORRELATION_CONTEXT, + false, + () -> null, + (n, c, o) -> CorrelationParamsContext.parse(o), + m -> toType(m).correlationParams + ); + + protected final Parameter> meta = Parameter.metaParam(); + + public Builder(String name) { + super(name); + } + + @Override + protected List> getParameters() { + return Arrays.asList(stored, hasDocValues, dimension, meta, correlationParamsContext); + } + + protected Explicit ignoreMalformed(BuilderContext context) { + if (ignoreMalformed != null) { + return new Explicit<>(ignoreMalformed, true); + } + if (context.indexSettings() != null) { + return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false); + } + return Defaults.IGNORE_MALFORMED; + } + + @Override + public ParametrizedFieldMapper build(BuilderContext context) { + final CorrelationParamsContext correlationParams = correlationParamsContext.getValue(); + final MultiFields multiFieldsBuilder = this.multiFieldsBuilder.build(this, context); + final CopyTo copyToBuilder = copyTo.build(); + final Explicit ignoreMalformed = ignoreMalformed(context); + final Map metaValue = meta.getValue(); + + final CorrelationVectorFieldType mappedFieldType = new CorrelationVectorFieldType( + buildFullName(context), + metaValue, + dimension.getValue(), + correlationParams + ); + + LuceneFieldMapper.CreateLuceneFieldMapperInput createLuceneFieldMapperInput = + new LuceneFieldMapper.CreateLuceneFieldMapperInput( + name, + mappedFieldType, + multiFieldsBuilder, + copyToBuilder, + ignoreMalformed, + stored.get(), + hasDocValues.get(), + correlationParams + ); + return new LuceneFieldMapper(createLuceneFieldMapperInput); + } + } + + public static class TypeParser implements Mapper.TypeParser { + + public TypeParser() {} + + @Override + public Mapper.Builder parse(String name, Map node, ParserContext context) throws MapperParsingException { + Builder builder = new CorrelationVectorFieldMapper.Builder(name); + builder.parse(name, context, node); + + if (builder.dimension.getValue() == -1) { + throw new IllegalArgumentException(String.format(Locale.getDefault(), "Dimension value missing for vector: %s", name)); + } + return builder; + } + } + + public static class CorrelationVectorFieldType extends MappedFieldType { + int dimension; + CorrelationParamsContext correlationParams; + + public CorrelationVectorFieldType(String name, Map meta, int dimension) { + this(name, meta, dimension, null); + } + + public CorrelationVectorFieldType(String name, Map meta, int dimension, CorrelationParamsContext correlationParams) { + super(name, false, false, true, TextSearchInfo.NONE, meta); + this.dimension = dimension; + this.correlationParams = correlationParams; + } + + @Override + public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String s) { + throw new UnsupportedOperationException("Correlation Vector do not support fields search"); + } + + @Override + public String typeName() { + return CONTENT_TYPE; + } + + @Override + public Query existsQuery(QueryShardContext context) { + return new FieldExistsQuery(name()); + } + + @Override + public Query termQuery(Object o, QueryShardContext context) { + throw new QueryShardException( + context, + String.format(Locale.getDefault(), "Correlation vector do not support exact searching, use Correlation queries instead: [%s]", name()) + ); + } + + public int getDimension() { + return dimension; + } + + public CorrelationParamsContext getCorrelationParams() { + return correlationParams; + } + } + + protected Explicit ignoreMalformed; + protected boolean stored; + protected boolean hasDocValues; + protected Integer dimension; + protected CorrelationParamsContext correlationParams; + + public CorrelationVectorFieldMapper( + String simpleName, + CorrelationVectorFieldType mappedFieldType, + FieldMapper.MultiFields multiFields, + FieldMapper.CopyTo copyTo, + Explicit ignoreMalformed, + boolean stored, + boolean hasDocValues + ) { + super(simpleName, mappedFieldType, multiFields, copyTo); + this.ignoreMalformed = ignoreMalformed; + this.stored = stored; + this.hasDocValues = hasDocValues; + this.dimension = mappedFieldType.getDimension(); + } + + @Override + protected CorrelationVectorFieldMapper clone() { + return (CorrelationVectorFieldMapper) super.clone(); + } + + @Override + protected String contentType() { + return CONTENT_TYPE; + } + + @Override + protected void parseCreateField(ParseContext parseContext) throws IOException { + parseCreateField(parseContext, fieldType().getDimension()); + } + + protected abstract void parseCreateField(ParseContext parseContext, int dimension) throws IOException; + + Optional getFloatsFromContext(ParseContext context, int dimension) throws IOException { + context.path().add(simpleName()); + + List vector = new ArrayList<>(); + XContentParser.Token token = context.parser().currentToken(); + float value; + if (token == XContentParser.Token.START_ARRAY) { + token = context.parser().nextToken(); + while (token != XContentParser.Token.END_ARRAY) { + value = context.parser().floatValue(); + + if (Float.isNaN(value)) { + throw new IllegalArgumentException("Correlation vector values cannot be NaN"); + } + + if (Float.isInfinite(value)) { + throw new IllegalArgumentException("Correlation vector values cannot be infinity"); + } + vector.add(value); + token = context.parser().nextToken(); + } + } else if (token == XContentParser.Token.VALUE_NUMBER) { + value = context.parser().floatValue(); + if (Float.isNaN(value)) { + throw new IllegalArgumentException("Correlation vector values cannot be NaN"); + } + + if (Float.isInfinite(value)) { + throw new IllegalArgumentException("Correlation vector values cannot be infinity"); + } + vector.add(value); + context.parser().nextToken(); + } else if (token == XContentParser.Token.VALUE_NULL) { + context.path().remove(); + return Optional.empty(); + } + + if (dimension != vector.size()) { + String errorMessage = String.format("Vector dimension mismatch. Expected: %d, Given: %d", dimension, vector.size()); + throw new IllegalArgumentException(errorMessage); + } + + float[] array = new float[vector.size()]; + int i = 0; + for (Float f: vector) { + array[i++] = f; + } + return Optional.of(array); + } + + @Override + protected boolean docValuesByDefault() { + return true; + } + + @Override + public ParametrizedFieldMapper.Builder getMergeBuilder() { + return new CorrelationVectorFieldMapper.Builder(simpleName()).init(this); + } + + @Override + public boolean parsesArrayValue() { + return true; + } + + @Override + public CorrelationVectorFieldType fieldType() { + return (CorrelationVectorFieldType) super.fieldType(); + } + + @Override + protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { + super.doXContentBody(builder, includeDefaults, params); + if (includeDefaults || ignoreMalformed.explicit()) { + builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value()); + } + } + + public static class Names { + public static final String IGNORE_MALFORMED = "ignore_malformed"; + } + + public static class Defaults { + public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/correlation/index/mapper/LuceneFieldMapper.java b/src/main/java/org/opensearch/securityanalytics/correlation/index/mapper/LuceneFieldMapper.java new file mode 100644 index 000000000..11620addc --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/correlation/index/mapper/LuceneFieldMapper.java @@ -0,0 +1,156 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.correlation.index.mapper; + +import org.apache.lucene.document.FieldType; +import org.apache.lucene.document.KnnVectorField; +import org.apache.lucene.document.StoredField; +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.VectorSimilarityFunction; +import org.opensearch.common.Explicit; +import org.opensearch.index.mapper.ParseContext; +import org.opensearch.securityanalytics.correlation.index.CorrelationParamsContext; +import org.opensearch.securityanalytics.correlation.index.VectorField; + +import java.io.IOException; +import java.util.Optional; + +import static org.apache.lucene.index.VectorValues.MAX_DIMENSIONS; + +public class LuceneFieldMapper extends CorrelationVectorFieldMapper { + + private static final int LUCENE_MAX_DIMENSION = MAX_DIMENSIONS; + + private final FieldType vectorFieldType; + + public LuceneFieldMapper(final CreateLuceneFieldMapperInput input) { + super( + input.getName(), + input.getMappedFieldType(), + input.getMultiFields(), + input.getCopyTo(), + input.getIgnoreMalformed(), + input.isStored(), + input.isHasDocValues() + ); + + this.correlationParams = input.getCorrelationParams(); + final VectorSimilarityFunction vectorSimilarityFunction = this.correlationParams.getSimilarityFunction(); + + final int dimension = input.getMappedFieldType().getDimension(); + if (dimension > LUCENE_MAX_DIMENSION) { + throw new IllegalArgumentException( + String.format( + "Dimension value cannot be greater than [%s] but got [%s] for vector [%s]", + LUCENE_MAX_DIMENSION, + dimension, + input.getName() + ) + ); + } + + this.fieldType = KnnVectorField.createFieldType(dimension, vectorSimilarityFunction); + + if (this.hasDocValues) { + this.vectorFieldType = buildDocValuesFieldType(); + } else { + this.vectorFieldType = null; + } + } + + private static FieldType buildDocValuesFieldType() { + FieldType field = new FieldType(); + field.setDocValuesType(DocValuesType.BINARY); + field.freeze(); + return field; + } + + @Override + protected void parseCreateField(ParseContext context, int dimension) throws IOException { + Optional arrayOptional = getFloatsFromContext(context, dimension); + + if (arrayOptional.isEmpty()) { + return; + } + final float[] array = arrayOptional.get(); + + KnnVectorField point = new KnnVectorField(name(), array, fieldType); + + context.doc().add(point); + if (fieldType.stored()) { + context.doc().add(new StoredField(name(), point.toString())); + } + if (hasDocValues && vectorFieldType != null) { + context.doc().add(new VectorField(name(), array, vectorFieldType)); + } + context.path().remove(); + } + + static class CreateLuceneFieldMapperInput { + String name; + + CorrelationVectorFieldType mappedFieldType; + + MultiFields multiFields; + + CopyTo copyTo; + + Explicit ignoreMalformed; + boolean stored; + boolean hasDocValues; + + CorrelationParamsContext correlationParams; + + public CreateLuceneFieldMapperInput(String name, + CorrelationVectorFieldType mappedFieldType, + MultiFields multiFields, + CopyTo copyTo, + Explicit ignoreMalformed, + boolean stored, + boolean hasDocValues, + CorrelationParamsContext correlationParams) { + this.name = name; + this.mappedFieldType = mappedFieldType; + this.multiFields = multiFields; + this.copyTo = copyTo; + this.ignoreMalformed = ignoreMalformed; + this.stored = stored; + this.hasDocValues = hasDocValues; + this.correlationParams = correlationParams; + } + + public String getName() { + return name; + } + + public CorrelationVectorFieldType getMappedFieldType() { + return mappedFieldType; + } + + public MultiFields getMultiFields() { + return multiFields; + } + + public CopyTo getCopyTo() { + return copyTo; + } + + public Explicit getIgnoreMalformed() { + return ignoreMalformed; + } + + public boolean isStored() { + return stored; + } + + public boolean isHasDocValues() { + return hasDocValues; + } + + public CorrelationParamsContext getCorrelationParams() { + return correlationParams; + } + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/correlation/index/query/CorrelationQueryBuilder.java b/src/main/java/org/opensearch/securityanalytics/correlation/index/query/CorrelationQueryBuilder.java new file mode 100644 index 000000000..654358457 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/correlation/index/query/CorrelationQueryBuilder.java @@ -0,0 +1,236 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.correlation.index.query; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.search.Query; +import org.opensearch.common.ParsingException; +import org.opensearch.common.Strings; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.core.ParseField; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.index.mapper.MappedFieldType; +import org.opensearch.index.mapper.NumberFieldMapper; +import org.opensearch.index.query.AbstractQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.QueryShardContext; +import org.opensearch.securityanalytics.correlation.index.mapper.CorrelationVectorFieldMapper; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Locale; +import java.util.Objects; + +public class CorrelationQueryBuilder extends AbstractQueryBuilder { + + private static final Logger log = LogManager.getLogger(CorrelationQueryBuilder.class); + public static final ParseField VECTOR_FIELD = new ParseField("vector"); + public static final ParseField K_FIELD = new ParseField("k"); + public static final ParseField FILTER_FIELD = new ParseField("filter"); + public static int K_MAX = 10000; + + public static final String NAME = "correlation"; + + private final String fieldName; + private final float[] vector; + private int k = 0; + private QueryBuilder filter; + + public CorrelationQueryBuilder(String fieldName, float[] vector, int k) { + this(fieldName, vector, k, null); + } + + public CorrelationQueryBuilder(String fieldName, float[] vector, int k, QueryBuilder filter) { + if (Strings.isNullOrEmpty(fieldName)) { + throw new IllegalArgumentException(String.format(Locale.getDefault(), "[%s] requires fieldName", NAME)); + } + if (vector == null) { + throw new IllegalArgumentException(String.format(Locale.getDefault(), "[%s] requires query vector", NAME)); + } + if (vector.length == 0) { + throw new IllegalArgumentException(String.format(Locale.getDefault(), "[%s] query vector is empty", NAME)); + } + if (k <= 0) { + throw new IllegalArgumentException(String.format(Locale.getDefault(), "[%s] requires k > 0", NAME)); + } + if (k > K_MAX) { + throw new IllegalArgumentException(String.format(Locale.getDefault(), "[%s] requires k <= ", K_MAX)); + } + + this.fieldName = fieldName; + this.vector = vector; + this.k = k; + this.filter = filter; + } + + public CorrelationQueryBuilder(StreamInput sin) throws IOException { + super(sin); + try { + this.fieldName = sin.readString(); + this.vector = sin.readFloatArray(); + this.k = sin.readInt(); + this.filter = sin.readOptionalNamedWriteable(QueryBuilder.class); + } catch (IOException ex) { + throw new RuntimeException("Unable to create CorrelationQueryBuilder", ex); + } + } + + private static float[] objectsToFloats(List objs) { + float[] vector = new float[objs.size()]; + for (int i = 0; i < objs.size(); ++i) { + vector[i] = ((Number) objs.get(i)).floatValue(); + } + return vector; + } + + public static CorrelationQueryBuilder fromXContent(XContentParser parser) throws IOException { + String fieldName = null; + List vector = null; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; + + int k = 0; + QueryBuilder filter = null; + String queryName = null; + String currentFieldName = null; + XContentParser.Token token; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + throwParsingExceptionOnMultipleFields(NAME, parser.getTokenLocation(), fieldName, currentFieldName); + fieldName = currentFieldName; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue() || token == XContentParser.Token.START_ARRAY) { + if (VECTOR_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + vector = parser.list(); + } else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + boost = parser.floatValue(); + } else if (K_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + k = (Integer) NumberFieldMapper.NumberType.INTEGER.parse(parser.objectBytes(), false); + } else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + queryName = parser.text(); + } else { + throw new ParsingException( + parser.getTokenLocation(), + "[" + NAME + "] query does not support [" + currentFieldName + "]" + ); + } + } else if (token == XContentParser.Token.START_OBJECT) { + String tokenName = parser.currentName(); + if (FILTER_FIELD.getPreferredName().equals(tokenName)) { + filter = parseInnerQueryBuilder(parser); + } else { + throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] unknown token [" + token + "]"); + } + } else { + throw new ParsingException( + parser.getTokenLocation(), + "[" + NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]" + ); + } + } + } else { + throwParsingExceptionOnMultipleFields(NAME, parser.getTokenLocation(), fieldName, parser.currentName()); + fieldName = parser.currentName(); + vector = parser.list(); + } + } + + assert vector != null; + CorrelationQueryBuilder correlationQueryBuilder = new CorrelationQueryBuilder(fieldName, objectsToFloats(vector), k, filter); + correlationQueryBuilder.queryName(queryName); + correlationQueryBuilder.boost(boost); + return correlationQueryBuilder; + } + + public String fieldName() { + return fieldName; + } + + public Object vector() { + return vector; + } + + public int getK() { + return k; + } + + public QueryBuilder getFilter() { + return filter; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + out.writeFloatArray(vector); + out.writeInt(k); + out.writeOptionalNamedWriteable(filter); + } + + @Override + public void doXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + builder.startObject(fieldName); + + builder.field(VECTOR_FIELD.getPreferredName(), vector); + builder.field(K_FIELD.getPreferredName(), k); + if (filter != null) { + builder.field(FILTER_FIELD.getPreferredName(), filter); + } + printBoostAndQueryName(builder); + builder.endObject(); + builder.endObject(); + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + MappedFieldType mappedFieldType = context.fieldMapper(fieldName); + + if (!(mappedFieldType instanceof CorrelationVectorFieldMapper.CorrelationVectorFieldType)) { + throw new IllegalArgumentException(String.format(Locale.getDefault(), "Field '%s' is not knn_vector type.", this.fieldName)); + } + + CorrelationVectorFieldMapper.CorrelationVectorFieldType correlationVectorFieldType = (CorrelationVectorFieldMapper.CorrelationVectorFieldType) mappedFieldType; + int fieldDimension = correlationVectorFieldType.getDimension(); + + if (fieldDimension != vector.length) { + throw new IllegalArgumentException( + String.format(Locale.getDefault(), "Query vector has invalid dimension: %d. Dimension should be: %d", vector.length, fieldDimension) + ); + } + + String indexName = context.index().getName(); + CorrelationQueryFactory.CreateQueryRequest createQueryRequest = new CorrelationQueryFactory.CreateQueryRequest( + indexName, + this.fieldName, + this.vector, + this.k, + this.filter, + context + ); + return CorrelationQueryFactory.create(createQueryRequest); + } + + @Override + protected boolean doEquals(CorrelationQueryBuilder other) { + return Objects.equals(fieldName, other.fieldName) && Arrays.equals(vector, other.vector) && Objects.equals(k, other.k); + } + + @Override + protected int doHashCode() { + return Objects.hash(fieldName, vector, k); + } + + @Override + public String getWriteableName() { + return NAME; + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/correlation/index/query/CorrelationQueryFactory.java b/src/main/java/org/opensearch/securityanalytics/correlation/index/query/CorrelationQueryFactory.java new file mode 100644 index 000000000..9bfbf7e91 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/correlation/index/query/CorrelationQueryFactory.java @@ -0,0 +1,89 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.correlation.index.query; + +import org.apache.lucene.search.KnnVectorQuery; +import org.apache.lucene.search.Query; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.QueryShardContext; + +import java.io.IOException; +import java.util.Optional; + +public class CorrelationQueryFactory { + + public static Query create(CreateQueryRequest createQueryRequest) { + final String indexName = createQueryRequest.getIndexName(); + final String fieldName = createQueryRequest.getFieldName(); + final int k = createQueryRequest.getK(); + final float[] vector = createQueryRequest.getVector(); + + if (createQueryRequest.getFilter().isPresent()) { + final QueryShardContext context = createQueryRequest.getContext().orElseThrow( + () -> new RuntimeException("Shard context cannot be null") + ); + + try { + final Query filterQuery = createQueryRequest.getFilter().get().toQuery(context); + return new KnnVectorQuery(fieldName, vector, k, filterQuery); + } catch (IOException ex) { + throw new RuntimeException("Cannot create knn query with filter", ex); + } + } + return new KnnVectorQuery(fieldName, vector, k); + } + + static class CreateQueryRequest { + private String indexName; + + private String fieldName; + + private float[] vector; + + private int k; + + private QueryBuilder filter; + + private QueryShardContext context; + + public CreateQueryRequest(String indexName, + String fieldName, + float[] vector, + int k, + QueryBuilder filter, + QueryShardContext context) { + this.indexName = indexName; + this.fieldName = fieldName; + this.vector = vector; + this.k = k; + this.filter = filter; + this.context = context; + } + + public String getIndexName() { + return indexName; + } + + public String getFieldName() { + return fieldName; + } + + public float[] getVector() { + return vector; + } + + public int getK() { + return k; + } + + public Optional getFilter() { + return Optional.ofNullable(filter); + } + + public Optional getContext() { + return Optional.ofNullable(context); + } + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/findings/FindingsService.java b/src/main/java/org/opensearch/securityanalytics/findings/FindingsService.java index eba6c15bc..dc5880ec6 100644 --- a/src/main/java/org/opensearch/securityanalytics/findings/FindingsService.java +++ b/src/main/java/org/opensearch/securityanalytics/findings/FindingsService.java @@ -5,18 +5,19 @@ package org.opensearch.securityanalytics.findings; import java.util.ArrayList; -import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.opensearch.OpenSearchStatusException; import org.opensearch.action.ActionListener; -import org.opensearch.action.support.GroupedActionListener; import org.opensearch.client.Client; import org.opensearch.client.node.NodeClient; import org.opensearch.commons.alerting.AlertingPluginInterface; +import org.opensearch.commons.alerting.model.DocLevelQuery; import org.opensearch.commons.alerting.model.FindingWithDocs; import org.opensearch.commons.alerting.model.Table; import org.opensearch.rest.RestStatus; @@ -51,7 +52,7 @@ public FindingsService(Client client) { * @param table group of search related parameters * @param listener ActionListener to get notified on response or error */ - public void getFindingsByDetectorId(String detectorId, Table table, ActionListener listener) { + public void getFindingsByDetectorId(String detectorId, Table table, ActionListener listener ) { this.client.execute(GetDetectorAction.INSTANCE, new GetDetectorRequest(detectorId, -3L), new ActionListener<>() { @Override @@ -59,17 +60,15 @@ public void onResponse(GetDetectorResponse getDetectorResponse) { // Get all monitor ids from detector Detector detector = getDetectorResponse.getDetector(); List monitorIds = detector.getMonitorIds(); - // Using GroupedActionListener here as we're going to issue one GetFindingsActions for each monitorId - ActionListener multiGetFindingsListener = new GroupedActionListener<>(new ActionListener<>() { + ActionListener getFindingsResponseListener = new ActionListener<>() { @Override - public void onResponse(Collection responses) { + public void onResponse(GetFindingsResponse resp) { Integer totalFindings = 0; List findings = new ArrayList<>(); // Merge all findings into one response - for(GetFindingsResponse resp : responses) { - totalFindings += resp.getTotalFindings(); - findings.addAll(resp.getFindings()); - } + totalFindings += resp.getTotalFindings(); + findings.addAll(resp.getFindings()); + GetFindingsResponse masterResponse = new GetFindingsResponse( totalFindings, findings @@ -83,26 +82,26 @@ public void onFailure(Exception e) { log.error("Failed to fetch findings for detector " + detectorId, e); listener.onFailure(SecurityAnalyticsException.wrap(e)); } - }, monitorIds.size()); + }; // monitor --> detectorId mapping - Map monitorToDetectorMapping = new HashMap<>(); + Map monitorToDetectorMapping = new HashMap<>(); detector.getMonitorIds().forEach( - monitorId -> monitorToDetectorMapping.put(monitorId, detector.getId()) + monitorId -> monitorToDetectorMapping.put(monitorId, detector) ); // Get findings for all monitor ids FindingsService.this.getFindingsByMonitorIds( monitorToDetectorMapping, monitorIds, - DetectorMonitorConfig.getFindingsIndex(detector.getDetectorType()), + DetectorMonitorConfig.getAllFindingsIndicesPattern(detector.getDetectorType()), table, - multiGetFindingsListener + getFindingsResponseListener ); } @Override public void onFailure(Exception e) { - listener.onFailure(SecurityAnalyticsException.wrap(e)); + listener.onFailure(e); } }); } @@ -111,11 +110,12 @@ public void onFailure(Exception e) { * Searches findings generated by specific Monitor * @param monitorToDetectorMapping monitorId --> detectorId mapper * @param monitorIds id of Monitor + * @param findingIndexName Finding index name to search findings on * @param table group of search related parameters * @param listener ActionListener to get notified on response or error */ public void getFindingsByMonitorIds( - Map monitorToDetectorMapping, + Map monitorToDetectorMapping, List monitorIds, String findingIndexName, Table table, @@ -167,16 +167,16 @@ public void getFindings( ActionListener listener ) { if (detectors.size() == 0) { - throw SecurityAnalyticsException.wrap(new IllegalArgumentException("detector list is empty!")); + throw new OpenSearchStatusException("detector list is empty!", RestStatus.NOT_FOUND); } List allMonitorIds = new ArrayList<>(); // Used to convert monitorId back to detectorId to store in result FindingDto - Map monitorToDetectorMapping = new HashMap<>(); + Map monitorToDetectorMapping = new HashMap<>(); detectors.forEach(detector -> { // monitor --> detector map detector.getMonitorIds().forEach( - monitorId -> monitorToDetectorMapping.put(monitorId, detector.getId()) + monitorId -> monitorToDetectorMapping.put(monitorId, detector) ); // all monitorIds allMonitorIds.addAll(detector.getMonitorIds()); @@ -186,7 +186,7 @@ public void getFindings( FindingsService.this.getFindingsByMonitorIds( monitorToDetectorMapping, allMonitorIds, - DetectorMonitorConfig.getFindingsIndex(detectorType.getDetectorType()), + DetectorMonitorConfig.getAllFindingsIndicesPattern(detectorType.getDetectorType()), table, new ActionListener<>() { @Override @@ -204,13 +204,21 @@ public void onFailure(Exception e) { ); } - public FindingDto mapFindingWithDocsToFindingDto(FindingWithDocs findingWithDocs, String detectorId) { + public FindingDto mapFindingWithDocsToFindingDto(FindingWithDocs findingWithDocs, Detector detector) { + List docLevelQueries = findingWithDocs.getFinding().getDocLevelQueries(); + if (docLevelQueries.isEmpty()) { // this is finding generated by a bucket level monitor + for (Map.Entry entry : detector.getRuleIdMonitorIdMap().entrySet()) { + if(entry.getValue().equals(findingWithDocs.getFinding().getMonitorId())) { + docLevelQueries = Collections.singletonList(new DocLevelQuery(entry.getKey(),"","",Collections.emptyList())); + } + } + } return new FindingDto( - detectorId, + detector.getId(), findingWithDocs.getFinding().getId(), findingWithDocs.getFinding().getRelatedDocIds(), findingWithDocs.getFinding().getIndex(), - findingWithDocs.getFinding().getDocLevelQueries(), + docLevelQueries, findingWithDocs.getFinding().getTimestamp(), findingWithDocs.getDocuments() ); diff --git a/src/main/java/org/opensearch/securityanalytics/indexmanagment/DetectorIndexManagementService.java b/src/main/java/org/opensearch/securityanalytics/indexmanagment/DetectorIndexManagementService.java new file mode 100644 index 000000000..98fdc08e9 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/indexmanagment/DetectorIndexManagementService.java @@ -0,0 +1,561 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.indexmanagment; + +import com.carrotsearch.hppc.cursors.ObjectCursor; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.action.ActionListener; +import org.opensearch.action.admin.cluster.state.ClusterStateRequest; +import org.opensearch.action.admin.cluster.state.ClusterStateResponse; +import org.opensearch.action.admin.indices.delete.DeleteIndexRequest; +import org.opensearch.action.admin.indices.rollover.RolloverRequest; +import org.opensearch.action.admin.indices.rollover.RolloverResponse; +import org.opensearch.action.support.IndicesOptions; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.client.Client; +import org.opensearch.cluster.ClusterChangedEvent; +import org.opensearch.cluster.ClusterStateListener; +import org.opensearch.cluster.metadata.AliasMetadata; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.inject.Inject; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.securityanalytics.config.monitors.DetectorMonitorConfig; +import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.threadpool.Scheduler; +import org.opensearch.threadpool.ThreadPool; + + +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ALERT_HISTORY_ENABLED; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ALERT_HISTORY_INDEX_MAX_AGE; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ALERT_HISTORY_MAX_DOCS; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ALERT_HISTORY_RETENTION_PERIOD; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ALERT_HISTORY_ROLLOVER_PERIOD; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.FINDING_HISTORY_ENABLED; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.FINDING_HISTORY_INDEX_MAX_AGE; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.FINDING_HISTORY_MAX_DOCS; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.FINDING_HISTORY_RETENTION_PERIOD; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.FINDING_HISTORY_ROLLOVER_PERIOD; + +public class DetectorIndexManagementService extends AbstractLifecycleComponent implements ClusterStateListener { + + private Logger logger = LogManager.getLogger(DetectorIndexManagementService.class); + + private final Client client; + private final ThreadPool threadPool; + private final ClusterService clusterService; + private Settings settings; + + private volatile Boolean alertHistoryEnabled; + private volatile Boolean findingHistoryEnabled; + + private volatile Long alertHistoryMaxDocs; + private volatile Long findingHistoryMaxDocs; + + private volatile TimeValue alertHistoryMaxAge; + private volatile TimeValue findingHistoryMaxAge; + + private volatile TimeValue alertHistoryRolloverPeriod; + private volatile TimeValue findingHistoryRolloverPeriod; + + private volatile TimeValue alertHistoryRetentionPeriod; + private volatile TimeValue findingHistoryRetentionPeriod; + + private volatile boolean isClusterManager = false; + + private Scheduler.Cancellable scheduledAlertsRollover = null; + private Scheduler.Cancellable scheduledFindingsRollover = null; + + List alertHistoryIndices = new ArrayList<>(); + List findingHistoryIndices = new ArrayList<>(); + + @Inject + public DetectorIndexManagementService(Settings settings, Client client, ThreadPool threadPool, ClusterService clusterService) { + this.settings = settings; + this.client = client; + this.threadPool = threadPool; + this.clusterService = clusterService; + + clusterService.addListener(this); + + clusterService.getClusterSettings().addSettingsUpdateConsumer(ALERT_HISTORY_ENABLED, this::setAlertHistoryEnabled); + clusterService.getClusterSettings().addSettingsUpdateConsumer(ALERT_HISTORY_MAX_DOCS, maxDocs -> { + setAlertHistoryMaxDocs(maxDocs); + for (HistoryIndexInfo h : alertHistoryIndices) { + h.maxDocs = maxDocs; + } + }); + clusterService.getClusterSettings().addSettingsUpdateConsumer(ALERT_HISTORY_INDEX_MAX_AGE, maxAge -> { + setAlertHistoryMaxAge(maxAge); + for (HistoryIndexInfo h : alertHistoryIndices) { + h.maxAge = maxAge; + } + }); + clusterService.getClusterSettings().addSettingsUpdateConsumer(ALERT_HISTORY_ROLLOVER_PERIOD, timeValue -> { + DetectorIndexManagementService.this.alertHistoryRolloverPeriod = timeValue; + rescheduleAlertRollover(); + }); + clusterService.getClusterSettings().addSettingsUpdateConsumer(ALERT_HISTORY_RETENTION_PERIOD, this::setAlertHistoryRetentionPeriod); + + clusterService.getClusterSettings().addSettingsUpdateConsumer(FINDING_HISTORY_ENABLED, this::setFindingHistoryEnabled); + clusterService.getClusterSettings().addSettingsUpdateConsumer(FINDING_HISTORY_MAX_DOCS, maxDocs -> { + setFindingHistoryMaxDocs(maxDocs); + for (HistoryIndexInfo h : findingHistoryIndices) { + h.maxDocs = maxDocs; + } + }); + clusterService.getClusterSettings().addSettingsUpdateConsumer(FINDING_HISTORY_INDEX_MAX_AGE, maxAge -> { + setFindingHistoryMaxAge(maxAge); + for (HistoryIndexInfo h : findingHistoryIndices) { + h.maxAge = maxAge; + } + }); + clusterService.getClusterSettings().addSettingsUpdateConsumer(FINDING_HISTORY_ROLLOVER_PERIOD, timeValue -> { + DetectorIndexManagementService.this.findingHistoryRolloverPeriod = timeValue; + rescheduleFindingRollover(); + }); + clusterService.getClusterSettings().addSettingsUpdateConsumer(FINDING_HISTORY_RETENTION_PERIOD, this::setFindingHistoryRetentionPeriod); + + initFromClusterSettings(); + + initAllIndexLists(); + } + + private void initAllIndexLists() { + Arrays.stream(Detector.DetectorType.values()).forEach( + detectorType -> { + + String alertsHistoryIndex = DetectorMonitorConfig.getAlertsHistoryIndex(detectorType.getDetectorType()); + String alertsHistoryIndexPattern = DetectorMonitorConfig.getAlertsHistoryIndexPattern(detectorType.getDetectorType()); + + alertHistoryIndices.add(new HistoryIndexInfo( + alertsHistoryIndex, + alertsHistoryIndexPattern, + alertMapping(), + alertHistoryMaxDocs, + alertHistoryMaxAge, + false + )); + + String findingsIndex = DetectorMonitorConfig.getFindingsIndex(detectorType.getDetectorType()); + String findingsIndexPattern = DetectorMonitorConfig.getFindingsIndexPattern(detectorType.getDetectorType()); + + findingHistoryIndices.add(new HistoryIndexInfo( + findingsIndex, + findingsIndexPattern, + findingMapping(), + findingHistoryMaxDocs, + findingHistoryMaxAge, + false + )); + }); + } + + private void initFromClusterSettings() { + alertHistoryEnabled = ALERT_HISTORY_ENABLED.get(settings); + findingHistoryEnabled = FINDING_HISTORY_ENABLED.get(settings); + alertHistoryMaxDocs = ALERT_HISTORY_MAX_DOCS.get(settings); + findingHistoryMaxDocs = FINDING_HISTORY_MAX_DOCS.get(settings); + alertHistoryMaxAge = ALERT_HISTORY_INDEX_MAX_AGE.get(settings); + findingHistoryMaxAge = FINDING_HISTORY_INDEX_MAX_AGE.get(settings); + alertHistoryRolloverPeriod = ALERT_HISTORY_ROLLOVER_PERIOD.get(settings); + findingHistoryRolloverPeriod = FINDING_HISTORY_ROLLOVER_PERIOD.get(settings); + alertHistoryRetentionPeriod = ALERT_HISTORY_RETENTION_PERIOD.get(settings); + findingHistoryRetentionPeriod = FINDING_HISTORY_RETENTION_PERIOD.get(settings); + } + + @Override + public void clusterChanged(ClusterChangedEvent event) { + // Instead of using a LocalNodeClusterManagerListener to track master changes, this service will + // track them here to avoid conditions where master listener events run after other + // listeners that depend on what happened in the master listener + if (this.isClusterManager != event.localNodeClusterManager()) { + this.isClusterManager = event.localNodeClusterManager(); + if (this.isClusterManager) { + onMaster(); + } else { + offMaster(); + } + } + for (HistoryIndexInfo h : alertHistoryIndices) { + h.isInitialized = event.state().metadata().hasAlias(h.indexAlias); + } + for (HistoryIndexInfo h : findingHistoryIndices) { + h.isInitialized = event.state().metadata().hasAlias(h.indexAlias); + } + } + + private void onMaster() { + try { + // try to rollover immediately as we might be restarting the cluster + rolloverAlertHistoryIndices(); + rolloverFindingHistoryIndices(); + // schedule the next rollover for approx MAX_AGE later + scheduledAlertsRollover = threadPool + .scheduleWithFixedDelay(() -> rolloverAndDeleteAlertHistoryIndices(), alertHistoryRolloverPeriod, executorName()); + scheduledFindingsRollover = threadPool + .scheduleWithFixedDelay(() -> rolloverAndDeleteFindingHistoryIndices(), findingHistoryRolloverPeriod, executorName()); + } catch (Exception e) { + // This should be run on cluster startup + logger.error( + "Error creating alert/finding indices. " + + "Alerts/Findings can't be recorded until master node is restarted.", + e + ); + } + } + + private void offMaster() { + if (scheduledAlertsRollover != null) { + scheduledAlertsRollover.cancel(); + } + if (scheduledFindingsRollover != null) { + scheduledFindingsRollover.cancel(); + } + } + + private String executorName() { + return ThreadPool.Names.MANAGEMENT; + } + + private void deleteOldIndices(String tag, String... indices) { + logger.error("info deleteOldIndices"); + ClusterStateRequest clusterStateRequest = new ClusterStateRequest() + .clear() + .indices(indices) + .metadata(true) + .local(true) + .indicesOptions(IndicesOptions.strictExpand()); + client.admin().cluster().state( + clusterStateRequest, + new ActionListener<>() { + @Override + public void onResponse(ClusterStateResponse clusterStateResponse) { + if (!clusterStateResponse.getState().metadata().getIndices().isEmpty()) { + List indicesToDelete = getIndicesToDelete(clusterStateResponse); + logger.info("Checking if we should delete " + tag + " indices: [" + indicesToDelete + "]"); + deleteAllOldHistoryIndices(indicesToDelete); + } else { + logger.info("No Old " + tag + " Indices to delete"); + } + } + + @Override + public void onFailure(Exception e) { + logger.error("Error fetching cluster state"); + } + } + ); + } + + private List getIndicesToDelete(ClusterStateResponse clusterStateResponse) { + List indicesToDelete = new ArrayList<>(); + for (ObjectCursor in : clusterStateResponse.getState().metadata().indices().values()) { + IndexMetadata indexMetaData = in.value; + String indexToDelete = getHistoryIndexToDelete(indexMetaData, alertHistoryRetentionPeriod.millis(), alertHistoryIndices, alertHistoryEnabled); + if (indexToDelete != null) { + indicesToDelete.add(indexToDelete); + } + indexToDelete = getHistoryIndexToDelete(indexMetaData, findingHistoryRetentionPeriod.millis(), findingHistoryIndices, findingHistoryEnabled); + if (indexToDelete != null) { + indicesToDelete.add(indexToDelete); + } + } + return indicesToDelete; + } + + private String getHistoryIndexToDelete( + IndexMetadata indexMetadata, + Long retentionPeriodMillis, + List historyIndices, + Boolean historyEnabled + ) { + long creationTime = indexMetadata.getCreationDate(); + if ((Instant.now().toEpochMilli() - creationTime) > retentionPeriodMillis) { + String alias = null; + for (ObjectCursor aliasMetadata : indexMetadata.getAliases().values()) { + Optional historyIndexInfoOptional = historyIndices + .stream() + .filter(e -> e.indexAlias.equals(aliasMetadata.value.alias())) + .findFirst(); + if (historyIndexInfoOptional.isPresent()) { + alias = historyIndexInfoOptional.get().indexAlias; + break; + } + } + if (alias != null) { + if (historyEnabled) { + // If the index has the write alias and history is enabled, don't delete the index + return null; + } + } + return indexMetadata.getIndex().getName(); + } + return null; + } + + private void deleteAllOldHistoryIndices(List indicesToDelete) { + if (indicesToDelete.size() > 0) { + DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(indicesToDelete.toArray(new String[0])); + client.admin().indices().delete( + deleteIndexRequest, + new ActionListener<>() { + @Override + public void onResponse(AcknowledgedResponse deleteIndicesResponse) { + if (!deleteIndicesResponse.isAcknowledged()) { + logger.error( + "Could not delete one or more Alerting/Finding history indices: [" + indicesToDelete + "]. Retrying one by one." + ); + deleteOldHistoryIndex(indicesToDelete); + } else { + logger.info("Succsessfuly deleted indices: [" + indicesToDelete + "]"); + } + } + + @Override + public void onFailure(Exception e) { + logger.error("Delete for Alerting/Finding History Indices failed: [" + indicesToDelete + "]. Retrying one By one."); + deleteOldHistoryIndex(indicesToDelete); + } + } + ); + } + } + + private void deleteOldHistoryIndex(List indicesToDelete) { + for (String index : indicesToDelete) { + final DeleteIndexRequest singleDeleteRequest = new DeleteIndexRequest(indicesToDelete.toArray(new String[0])); + + client.admin().indices().delete( + singleDeleteRequest, + new ActionListener<>() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + if (!acknowledgedResponse.isAcknowledged()) { + logger.error("Could not delete one or more Alerting/Finding history indices: " + index); + } + } + + @Override + public void onFailure(Exception e) { + logger.debug("Exception: [" + e.getMessage() + "] while deleting the index " + index); + } + } + ); + } + } + + private void rolloverAndDeleteAlertHistoryIndices() { + if (alertHistoryEnabled) rolloverAlertHistoryIndices(); + deleteOldIndices("Alert", DetectorMonitorConfig.getAllAlertsIndicesPatternForAllTypes().toArray(new String[0])); + } + + private void rolloverAndDeleteFindingHistoryIndices() { + if (findingHistoryEnabled) rolloverFindingHistoryIndices(); + deleteOldIndices("Finding", DetectorMonitorConfig.getAllFindingsIndicesPatternForAllTypes().toArray(new String[0])); + } + + private void rolloverIndex( + Boolean initialized, + String index, + String pattern, + String map, + Long docsCondition, + TimeValue ageCondition + ) { + if (!initialized) { + return; + } + + // We have to pass null for newIndexName in order to get Elastic to increment the index count. + RolloverRequest request = new RolloverRequest(index, null); + request.getCreateIndexRequest().index(pattern) + .mapping(map) + .settings(Settings.builder().put("index.hidden", true).build()); + request.addMaxIndexDocsCondition(docsCondition); + request.addMaxIndexAgeCondition(ageCondition); + client.admin().indices().rolloverIndex( + request, + new ActionListener<>() { + @Override + public void onResponse(RolloverResponse rolloverResponse) { + if (!rolloverResponse.isRolledOver()) { + logger.info(index + "not rolled over. Conditions were: " + rolloverResponse.getConditionStatus()); + } + } + + @Override + public void onFailure(Exception e) { + logger.error("rollover failed for index [" + index + "]."); + } + } + ); + } + + private void rolloverAlertHistoryIndices() { + for(HistoryIndexInfo h : alertHistoryIndices) { + rolloverIndex( + h.isInitialized, h.indexAlias, + h.indexPattern, h.indexMappings, + h.maxDocs, h.maxAge + ); + } + } + private void rolloverFindingHistoryIndices() { + for (HistoryIndexInfo h : findingHistoryIndices) { + rolloverIndex( + h.isInitialized, h.indexAlias, + h.indexPattern, h.indexMappings, + h.maxDocs, h.maxAge + ); + } + } + + private void rescheduleAlertRollover() { + if (clusterService.state().getNodes().isLocalNodeElectedMaster()) { + if (scheduledAlertsRollover != null) { + scheduledAlertsRollover.cancel(); + } + scheduledAlertsRollover = threadPool + .scheduleWithFixedDelay(() -> rolloverAndDeleteAlertHistoryIndices(), alertHistoryRolloverPeriod, executorName()); + } + } + + private void rescheduleFindingRollover() { + if (clusterService.state().getNodes().isLocalNodeElectedMaster()) { + if (scheduledFindingsRollover != null) { + scheduledFindingsRollover.cancel(); + } + scheduledFindingsRollover = threadPool + .scheduleWithFixedDelay(() -> rolloverAndDeleteFindingHistoryIndices(), findingHistoryRolloverPeriod, executorName()); + } + } + + private String alertMapping() { + String alertMapping = null; + try ( + InputStream is = DetectorIndexManagementService.class.getClassLoader().getResourceAsStream("mappings/alert_mapping.json") + ) { + alertMapping = new String(Objects.requireNonNull(is).readAllBytes(), StandardCharsets.UTF_8); + } catch (IOException e) { + logger.error(e.getMessage()); + } + return alertMapping; + } + + private String findingMapping() { + String findingMapping = null; + try ( + InputStream is = DetectorIndexManagementService.class.getClassLoader().getResourceAsStream("mappings/finding_mapping.json") + ) { + findingMapping = new String(Objects.requireNonNull(is).readAllBytes(), StandardCharsets.UTF_8); + } catch (IOException e) { + logger.error(e.getMessage()); + } + return findingMapping; + } + + // Setters + + public void setAlertHistoryEnabled(Boolean alertHistoryEnabled) { + this.alertHistoryEnabled = alertHistoryEnabled; + } + + public void setFindingHistoryEnabled(Boolean findingHistoryEnabled) { + this.findingHistoryEnabled = findingHistoryEnabled; + } + + public void setAlertHistoryMaxDocs(Long alertHistoryMaxDocs) { + this.alertHistoryMaxDocs = alertHistoryMaxDocs; + } + + public void setFindingHistoryMaxDocs(Long findingHistoryMaxDocs) { + this.findingHistoryMaxDocs = findingHistoryMaxDocs; + } + + public void setAlertHistoryMaxAge(TimeValue alertHistoryMaxAge) { + this.alertHistoryMaxAge = alertHistoryMaxAge; + } + + public void setFindingHistoryMaxAge(TimeValue findingHistoryMaxAge) { + this.findingHistoryMaxAge = findingHistoryMaxAge; + } + + public void setAlertHistoryRolloverPeriod(TimeValue alertHistoryRolloverPeriod) { + this.alertHistoryRolloverPeriod = alertHistoryRolloverPeriod; + } + + public void setFindingHistoryRolloverPeriod(TimeValue findingHistoryRolloverPeriod) { + this.findingHistoryRolloverPeriod = findingHistoryRolloverPeriod; + } + + public void setAlertHistoryRetentionPeriod(TimeValue alertHistoryRetentionPeriod) { + this.alertHistoryRetentionPeriod = alertHistoryRetentionPeriod; + } + + public void setFindingHistoryRetentionPeriod(TimeValue findingHistoryRetentionPeriod) { + this.findingHistoryRetentionPeriod = findingHistoryRetentionPeriod; + } + + public void setClusterManager(boolean clusterManager) { + isClusterManager = clusterManager; + } + + @Override + protected void doStart() { + + } + + @Override + protected void doStop() { + if (scheduledAlertsRollover != null) { + scheduledAlertsRollover.cancel(); + } + if (scheduledFindingsRollover != null) { + scheduledFindingsRollover.cancel(); + } + } + + @Override + protected void doClose() { + if (scheduledAlertsRollover != null) { + scheduledAlertsRollover.cancel(); + } + if (scheduledFindingsRollover != null) { + scheduledFindingsRollover.cancel(); + } + } + + private static class HistoryIndexInfo { + + String indexAlias; + String indexPattern; + String indexMappings; + Long maxDocs; + TimeValue maxAge; + boolean isInitialized; + + public HistoryIndexInfo(String indexAlias, String indexPattern, String indexMappings, Long maxDocs, TimeValue maxAge, boolean isInitialized) { + this.indexAlias = indexAlias; + this.indexPattern = indexPattern; + this.indexMappings = indexMappings; + this.maxDocs = maxDocs; + this.maxAge = maxAge; + this.isInitialized = isInitialized; + } + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/mapper/IndexTemplateManager.java b/src/main/java/org/opensearch/securityanalytics/mapper/IndexTemplateManager.java new file mode 100644 index 000000000..ecc684005 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/mapper/IndexTemplateManager.java @@ -0,0 +1,418 @@ +/* +Copyright OpenSearch Contributors +SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.mapper; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.action.ActionListener; +import org.opensearch.action.StepListener; +import org.opensearch.action.admin.indices.template.delete.DeleteComponentTemplateAction; +import org.opensearch.action.admin.indices.template.delete.DeleteComposableIndexTemplateAction; +import org.opensearch.action.admin.indices.template.put.PutComponentTemplateAction; +import org.opensearch.action.admin.indices.template.put.PutComposableIndexTemplateAction; +import org.opensearch.action.support.GroupedActionListener; +import org.opensearch.action.support.IndicesOptions; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.client.Client; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.ComponentTemplate; +import org.opensearch.cluster.metadata.ComposableIndexTemplate; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.metadata.MetadataIndexTemplateService; +import org.opensearch.cluster.metadata.Template; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.compress.CompressedXContent; +import org.opensearch.common.regex.Regex; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.securityanalytics.model.CreateMappingResult; +import org.opensearch.securityanalytics.util.DetectorUtils; +import org.opensearch.securityanalytics.util.IndexUtils; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; +import org.opensearch.securityanalytics.util.XContentUtils; + + +import static org.opensearch.securityanalytics.mapper.IndexTemplateUtils.computeComponentTemplateName; +import static org.opensearch.securityanalytics.mapper.IndexTemplateUtils.computeIndexTemplateName; +import static org.opensearch.securityanalytics.mapper.IndexTemplateUtils.copyTemplate; +import static org.opensearch.securityanalytics.mapper.IndexTemplateUtils.normalizeIndexName; + +public class IndexTemplateManager { + + private static final Logger log = LogManager.getLogger(IndexTemplateManager.class); + + public static String OPENSEARCH_SAP_COMPONENT_TEMPLATE_PREFIX = ".opensearch-sap-alias-mappings-component-"; + public static String OPENSEARCH_SAP_INDEX_TEMPLATE_PREFIX = ".opensearch-sap-alias-mappings-index-template-"; + + private Client client; + private ClusterService clusterService; + private IndexNameExpressionResolver indexNameExpressionResolver; + private NamedXContentRegistry xContentRegistry; + + public IndexTemplateManager(Client client, ClusterService clusterService, IndexNameExpressionResolver indexNameExpressionResolver, NamedXContentRegistry xContentRegistry) { + this.client = client; + this.clusterService = clusterService; + this.indexNameExpressionResolver = indexNameExpressionResolver; + this.xContentRegistry = xContentRegistry; + } + + public void upsertIndexTemplateWithAliasMappings( + String indexName, + Collection createMappingResults, + ActionListener actionListener + ) { + ClusterState state = this.clusterService.state(); + + if (IndexUtils.isConcreteIndex(indexName, state)) { + actionListener.onFailure(SecurityAnalyticsException.wrap( + new IllegalStateException("Can't upsert index template for concrete index!")) + ); + return; + } + + String concreteIndexName = IndexUtils.getWriteIndex(indexName, state); + if (concreteIndexName == null) { + String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, IndicesOptions.LENIENT_EXPAND_OPEN, indexName); + if (concreteIndices.length == 0) { + actionListener.onFailure(SecurityAnalyticsException.wrap( + new IllegalStateException("Can't upsert index template for concrete index!")) + ); + return; + } + concreteIndexName = IndexUtils.getNewestIndexByCreationDate(concreteIndices, state); + } + + // Get applied mappings for our concrete index of interest: writeIndex or newest(creation date) + final String cin = concreteIndexName; + Optional createMappingResult = + createMappingResults.stream() + .filter(e -> e.getConcreteIndexName().equals(cin)) + .findFirst(); + if (createMappingResult.isPresent() == false) { + actionListener.onFailure(SecurityAnalyticsException.wrap( + new IllegalStateException("Can't upsert index template for concrete index!")) + ); + return; + } + // Mappings applied to writeIndex or newest index + Map mappings = createMappingResult.get().getMappings(); + + StepListener upsertComponentTemplateStepListener = new StepListener<>(); + + // Upsert component template first + upsertComponentTemplate(indexName, client, state, mappings, upsertComponentTemplateStepListener); + + upsertComponentTemplateStepListener.whenComplete( acknowledgedResponse -> { + + // Find template which matches input index best + String templateName = + MetadataIndexTemplateService.findV2Template( + state.metadata(), + normalizeIndexName(indexName), + false + ); + + if (templateName == null) { + // If we find conflicting templates(regardless of priority) and that template was created by us, + // we will silently update index_pattern of that template. + // Otherwise, we will fail since we don't want to change index_pattern of user created index template + Map> conflictingTemplates = + MetadataIndexTemplateService.findConflictingV2Templates( + state, + computeIndexTemplateName(indexName), + List.of(computeIndexPattern(indexName)) + ); + // If there is 1 conflict which we own (SAP), we will update that template's index_pattern field + if (conflictingTemplates.size() == 1) { + String conflictingTemplateName = conflictingTemplates.keySet().iterator().next(); + if (conflictingTemplateName.startsWith(OPENSEARCH_SAP_INDEX_TEMPLATE_PREFIX)) { + templateName = conflictingTemplateName; + } else { + String errorMessage = "Found conflicting template: [" + conflictingTemplateName + "]"; + log.error(errorMessage); + actionListener.onFailure(SecurityAnalyticsException.wrap(new IllegalStateException(errorMessage))); + } + } else if (conflictingTemplates.size() > 1) { + String errorMessage = "Found conflicting templates: [" + + String.join(", ", conflictingTemplates.keySet()) + "]"; + log.error(errorMessage); + actionListener.onFailure(SecurityAnalyticsException.wrap(new IllegalStateException(errorMessage))); + return; + } + } + + + String componentName = computeComponentTemplateName(indexName); + + ComposableIndexTemplate template; + // if we didn't find existing template we will create a new one + if (templateName == null) { + template = new ComposableIndexTemplate( + List.of(computeIndexPattern(indexName)), + null, + List.of(componentName), + null, + null, + null + ); + templateName = computeIndexTemplateName(indexName); + // Create new ComposableIndexTemplate + upsertIndexTemplate( + client, + true, + template, + templateName, + actionListener + ); + } else { + // There is existing template which covers our index pattern. + // Check if we need to append our component to composedOf list + template = state.metadata().templatesV2().get(templateName); + if (template.composedOf().contains(componentName) == false) { + List newComposedOf = new ArrayList<>(template.composedOf()); + List indexPatterns = List.of(computeIndexPattern(indexName)); + ; + newComposedOf.add(componentName); + + try { + template = new ComposableIndexTemplate( + indexPatterns, + copyTemplate(template.template()), + newComposedOf, + template.priority(), + template.version(), + template.metadata(), + template.getDataStreamTemplate() + ); + // Update existing ComposableIndexTemplate + upsertIndexTemplate( + client, + false, + template, + templateName, + actionListener + ); + } catch (IOException e) { + log.error(e.getMessage()); + actionListener.onFailure(e); + } + } else { + actionListener.onResponse(new AcknowledgedResponse(true)); + } + } + + }, actionListener::onFailure); + + } + + private String computeIndexPattern(String indexName) { + return indexName.endsWith("*") == false ? indexName + "*" : indexName; + } + + private void upsertIndexTemplate( + Client client, + boolean create, + ComposableIndexTemplate indexTemplate, + String templateName, + ActionListener actionListener + ) { + + client.execute( + PutComposableIndexTemplateAction.INSTANCE, + new PutComposableIndexTemplateAction.Request(templateName) + .indexTemplate(indexTemplate) + .create(create), + new ActionListener<>() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + actionListener.onResponse(acknowledgedResponse); + } + + @Override + public void onFailure(Exception e) { + actionListener.onFailure(e); + } + } + ); + } + + private void upsertComponentTemplate( + String indexName, + Client client, + ClusterState state, + Map mappings, + ActionListener actionListener + ) { + + String componentName = computeComponentTemplateName(indexName); + boolean create = state.metadata().componentTemplates().containsKey(componentName) == false; + upsertComponentTemplate(componentName, create, client, mappings, new ActionListener<>() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + actionListener.onResponse(acknowledgedResponse); + } + + @Override + public void onFailure(Exception e) { + actionListener.onFailure(e); + } + }); + } + + private void upsertComponentTemplate( + String componentName, + boolean create, + Client client, + Map mappings, + ActionListener actionListener + ) { + try { + + String mappingsJson = XContentUtils.parseMapToJsonString(mappings); + + ComponentTemplate componentTemplate = new ComponentTemplate( + new Template(null, new CompressedXContent(mappingsJson), null), + 0L, + null + ); + PutComponentTemplateAction.Request req = + new PutComponentTemplateAction.Request(componentName) + .componentTemplate(componentTemplate) + .create(create); + + client.execute(PutComponentTemplateAction.INSTANCE, req, new ActionListener<>() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + actionListener.onResponse(acknowledgedResponse); + } + + @Override + public void onFailure(Exception e) { + actionListener.onFailure(e); + } + }); + } catch (IOException e) { + actionListener.onFailure(e); + } + } + + public void deleteAllUnusedTemplates(ActionListener actionListener) { + Map allSapTemplates = IndexTemplateUtils.getAllSapComposableIndexTemplates(clusterService.state()); + + StepListener> getDetectorInputsListener = new StepListener<>(); + + DetectorUtils.getAllDetectorInputs(client, xContentRegistry, getDetectorInputsListener); + + getDetectorInputsListener.whenComplete( allInputIndices -> { + + StepListener> doDeleteUnusedTemplatesListener = new StepListener<>(); + doDeleteUnusedTemplates(allSapTemplates, allInputIndices, doDeleteUnusedTemplatesListener); + + doDeleteUnusedTemplatesListener.whenComplete( deletedTemplates -> { + doDeleteUnusedComponentTemplates(actionListener); + actionListener.onResponse(null); + }, actionListener::onFailure); + + }, actionListener::onFailure); + } + + private void doDeleteUnusedComponentTemplates(ActionListener actionListener) { + Set componentTemplates = IndexTemplateUtils.getAllSapComponentTemplates(clusterService.state()); + // Start from set of ALL SAP Component Templates and remove each found in composableIndexTemplates. + // All component templates remaining in set are unused + clusterService.state().metadata().templatesV2().forEach( (name, template) -> + template.composedOf().forEach(componentTemplates::remove) + ); + // Nothing to delete + if (componentTemplates.size() == 0) { + actionListener.onResponse(null); + } + // Delete unused component templates + GroupedActionListener deleteMultipleComponentTemplatesListener = new GroupedActionListener(new ActionListener>() { + @Override + public void onResponse(Collection responses) { + actionListener.onResponse(null); + } + + @Override + public void onFailure(Exception e) { + actionListener.onFailure(e); + } + }, componentTemplates.size()); + + componentTemplates.forEach((name) -> + client.execute( + DeleteComponentTemplateAction.INSTANCE, + new DeleteComponentTemplateAction.Request(name), + deleteMultipleComponentTemplatesListener + ) + ); + } + + private void doDeleteUnusedTemplates( + Map allSapTemplates, + Set allDetectorInputIndices, + ActionListener> actionListener + ) { + Map toDeleteTemplates = new HashMap(); + Iterator templateIterator = allSapTemplates.entrySet().iterator(); + while (templateIterator.hasNext()) { + Map.Entry entry = (Map.Entry)templateIterator.next(); + String templateName = entry.getKey(); + ComposableIndexTemplate template = entry.getValue(); + + boolean matched = false; + for (String index : allDetectorInputIndices) { + // Skip concrete indices + if (IndexUtils.isConcreteIndex(index, clusterService.state())) { + continue; + } + // If any of index patterns of template matches input index, we can finish here and move to next template + if (template.indexPatterns().stream().anyMatch((pattern) -> Regex.simpleMatch(pattern, normalizeIndexName(index)))) { + matched = true; + break; + } + } + if (matched == false) { + toDeleteTemplates.put(templateName, template); + } + } + // Nothing to delete, just return + if (toDeleteTemplates.size() == 0) { + actionListener.onResponse(toDeleteTemplates); + return; + } + // Delete all found templates + GroupedActionListener deleteMultipleTemplatesListener = new GroupedActionListener(new ActionListener>() { + @Override + public void onResponse(Collection responses) { + actionListener.onResponse(toDeleteTemplates); + } + + @Override + public void onFailure(Exception e) { + actionListener.onFailure(e); + } + }, toDeleteTemplates.size()); + + toDeleteTemplates.forEach((name, template) -> + client.execute( + DeleteComposableIndexTemplateAction.INSTANCE, + new DeleteComposableIndexTemplateAction.Request(name), + deleteMultipleTemplatesListener + ) + ); + + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/mapper/IndexTemplateUtils.java b/src/main/java/org/opensearch/securityanalytics/mapper/IndexTemplateUtils.java new file mode 100644 index 000000000..26074dee3 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/mapper/IndexTemplateUtils.java @@ -0,0 +1,114 @@ +/* +Copyright OpenSearch Contributors +SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.mapper; + +import java.io.IOException; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.ComposableIndexTemplate; +import org.opensearch.cluster.metadata.Template; +import org.opensearch.common.bytes.BytesReference; +import org.opensearch.common.compress.CompressedXContent; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.common.xcontent.XContentHelper; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.XContentBuilder; + +import static org.opensearch.index.mapper.MapperService.SINGLE_MAPPING_NAME; +import static org.opensearch.securityanalytics.mapper.IndexTemplateManager.OPENSEARCH_SAP_COMPONENT_TEMPLATE_PREFIX; +import static org.opensearch.securityanalytics.mapper.IndexTemplateManager.OPENSEARCH_SAP_INDEX_TEMPLATE_PREFIX; + +public class IndexTemplateUtils { + + + public static Set getAllSapComponentTemplates(ClusterState state) { + Set componentTemplates = new HashSet<>(); + + state.metadata().componentTemplates().forEach( (name, instance) -> { + if (name.startsWith(OPENSEARCH_SAP_COMPONENT_TEMPLATE_PREFIX)) { + componentTemplates.add(name); + } + }); + return componentTemplates; + } + + public static boolean isSapComposableIndexTemplate(String templateName, ComposableIndexTemplate template) { + // We don't ever set template field inside ComposableIndexTemplate and our template always starts with OPENSEARCH_SAP_INDEX_TEMPLATE_PREFIX + // If any of these is true, that means that user touched template and we're not owner of it anymore + if (templateName.startsWith(OPENSEARCH_SAP_INDEX_TEMPLATE_PREFIX) == false || template.template() != null) { + return false; + } + // If user added ComponentTemplate then this ComposableIndexTemplate is owned by user + for (String componentTemplate : template.composedOf()) { + if (componentTemplate.startsWith(OPENSEARCH_SAP_COMPONENT_TEMPLATE_PREFIX) == false) { + return false; + } + } + return true; + } + + public static Map getAllSapComposableIndexTemplates(ClusterState state) { + Map sapTemplates = new HashMap<>(); + + state.metadata().templatesV2().forEach( (name, instance) -> { + if (isSapComposableIndexTemplate(name, instance)) { + sapTemplates.put(name, instance); + } + }); + return sapTemplates; + } + + public static String computeIndexTemplateName(String indexName) { + return OPENSEARCH_SAP_INDEX_TEMPLATE_PREFIX + normalizeIndexName(indexName); + } + + public static String computeComponentTemplateName(String indexName) { + if (indexName.endsWith("*")) { + indexName = indexName.substring(0, indexName.length() - 1); + } + return OPENSEARCH_SAP_COMPONENT_TEMPLATE_PREFIX + normalizeIndexName(indexName); + } + + public static String normalizeIndexName(String indexName) { + if (indexName.endsWith("*")) { + return indexName.substring(0, indexName.length() - 1); + } else { + return indexName; + } + } + + public static boolean isUserCreatedComposableTemplate(String templateName) { + return templateName.startsWith(OPENSEARCH_SAP_INDEX_TEMPLATE_PREFIX) == false; + } + + public static Template copyTemplate(Template template) throws IOException { + + if (template == null) { + return null; + } + + CompressedXContent outMappings = null; + CompressedXContent mappings = template.mappings(); + if (mappings != null) { + Map mappingsAsMap = XContentHelper.convertToMap(mappings.compressedReference(), true, XContentType.JSON).v2(); + if (mappingsAsMap.containsKey(SINGLE_MAPPING_NAME)) { + mappingsAsMap = (Map)mappingsAsMap.get(SINGLE_MAPPING_NAME); + } + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.map(mappingsAsMap); + + outMappings = new CompressedXContent(BytesReference.bytes(builder)); + } + return new Template( + template.settings(), + outMappings, + template.aliases() + ); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/mapper/MapperService.java b/src/main/java/org/opensearch/securityanalytics/mapper/MapperService.java index bde9bd0e8..cc7958c9b 100644 --- a/src/main/java/org/opensearch/securityanalytics/mapper/MapperService.java +++ b/src/main/java/org/opensearch/securityanalytics/mapper/MapperService.java @@ -5,27 +5,40 @@ package org.opensearch.securityanalytics.mapper; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; import org.apache.commons.lang3.tuple.Pair; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.opensearch.OpenSearchStatusException; import org.opensearch.action.ActionListener; +import org.opensearch.action.admin.indices.get.GetIndexRequest; +import org.opensearch.action.admin.indices.get.GetIndexResponse; import org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.opensearch.action.support.GroupedActionListener; import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.client.Client; import org.opensearch.client.IndicesAdminClient; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.metadata.MappingMetadata; +import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.collect.ImmutableOpenMap; -import org.opensearch.common.xcontent.XContentType; +import org.opensearch.rest.RestStatus; import org.opensearch.securityanalytics.action.GetIndexMappingsResponse; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; import org.opensearch.securityanalytics.action.GetMappingsViewResponse; +import org.opensearch.securityanalytics.model.CreateMappingResult; +import org.opensearch.securityanalytics.util.IndexUtils; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; import static org.opensearch.securityanalytics.mapper.MapperUtils.PATH; @@ -35,16 +48,18 @@ public class MapperService { private static final Logger log = LogManager.getLogger(MapperService.class); - IndicesAdminClient indicesClient; + private ClusterService clusterService; + private IndicesAdminClient indicesClient; + private IndexNameExpressionResolver indexNameExpressionResolver; + private IndexTemplateManager indexTemplateManager; public MapperService() {} - public MapperService(IndicesAdminClient indicesClient) { - this.indicesClient = indicesClient; - } - - void setIndicesAdminClient(IndicesAdminClient client) { - this.indicesClient = client; + public MapperService(Client client, ClusterService clusterService, IndexNameExpressionResolver indexNameExpressionResolver, IndexTemplateManager indexTemplateManager) { + this.indicesClient = client.admin().indices(); + this.clusterService = clusterService; + this.indexNameExpressionResolver = indexNameExpressionResolver; + this.indexTemplateManager = indexTemplateManager; } public void createMappingAction(String indexName, String ruleTopic, boolean partial, ActionListener actionListener) { @@ -53,11 +68,49 @@ public void createMappingAction(String indexName, String ruleTopic, boolean part public void createMappingAction(String indexName, String ruleTopic, String aliasMappings, boolean partial, ActionListener actionListener) { - GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices(indexName); + // If indexName is Datastream it is enough to apply mappings to writeIndex only + // since you can't update documents in non-write indices + String index = indexName; + boolean shouldUpsertIndexTemplate = IndexUtils.isConcreteIndex(indexName, this.clusterService.state()) == false; + if (IndexUtils.isDataStream(indexName, this.clusterService.state()) || IndexUtils.isAlias(indexName, this.clusterService.state())) { + log.debug("{} is an alias or datastream. Fetching write index for create mapping action.", indexName); + String writeIndex = IndexUtils.getWriteIndex(indexName, this.clusterService.state()); + if (writeIndex != null) { + log.debug("Write index for {} is {}", indexName, writeIndex); + index = writeIndex; + } + } + + GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices(index); indicesClient.getMappings(getMappingsRequest, new ActionListener<>() { @Override public void onResponse(GetMappingsResponse getMappingsResponse) { - createMappingActionContinuation(getMappingsResponse.getMappings(), ruleTopic, aliasMappings, partial, actionListener); + applyAliasMappings(getMappingsResponse.getMappings(), ruleTopic, aliasMappings, partial, new ActionListener<>() { + @Override + public void onResponse(Collection createMappingResponse) { + log.debug("Completed create mappings for {}", indexName); + // We will return ack==false if one of the requests returned that + // else return ack==true + Optional notAckd = createMappingResponse.stream() + .map(e -> e.getAcknowledgedResponse()) + .filter(e -> e.isAcknowledged() == false).findFirst(); + AcknowledgedResponse ack = new AcknowledgedResponse( + notAckd.isPresent() ? false : true + ); + + if (shouldUpsertIndexTemplate) { + indexTemplateManager.upsertIndexTemplateWithAliasMappings(indexName, createMappingResponse, actionListener); + } else { + actionListener.onResponse(ack); + } + } + + @Override + public void onFailure(Exception e) { + log.debug("Failed to create mappings for {}", indexName ); + actionListener.onFailure(e); + } + }); } @Override @@ -67,12 +120,52 @@ public void onFailure(Exception e) { }); } - private void createMappingActionContinuation(ImmutableOpenMap indexMappings, String ruleTopic, String aliasMappings, boolean partial, ActionListener actionListener) { + private void applyAliasMappings(ImmutableOpenMap indexMappings, String ruleTopic, String aliasMappings, boolean partial, ActionListener> actionListener) { + int numOfIndices = indexMappings.size(); + + GroupedActionListener doCreateMappingActionsListener = new GroupedActionListener(new ActionListener>() { @Override + public void onResponse(Collection response) { + actionListener.onResponse(response); + } + + @Override + public void onFailure(Exception e) { + actionListener.onFailure( + new SecurityAnalyticsException( + "Failed applying mappings to index", RestStatus.INTERNAL_SERVER_ERROR, e + ) + ); + } + }, numOfIndices); + + indexMappings.forEach(iter -> { + String indexName = iter.key; + MappingMetadata mappingMetadata = iter.value; + // Try to apply mapping to index + doCreateMapping(indexName, mappingMetadata, ruleTopic, aliasMappings, partial, doCreateMappingActionsListener); + }); + } + + /** + * Applies alias mappings to index. + * @param indexName Index name + * @param mappingMetadata Index mappings + * @param ruleTopic Rule topic spcifying specific alias templates + * @param aliasMappings User-supplied alias mappings + * @param partial Partial flag indicating if we should apply mappings partially, in case source index doesn't have all paths specified in alias mappings + * @param actionListener actionListener used to return response/error + */ + private void doCreateMapping( + String indexName, + MappingMetadata mappingMetadata, + String ruleTopic, + String aliasMappings, + boolean partial, + ActionListener actionListener + ) { - PutMappingRequest request; try { - String indexName = indexMappings.iterator().next().key; String aliasMappingsJSON; // aliasMappings parameter has higher priority then ruleTopic if (aliasMappings != null) { @@ -81,7 +174,9 @@ private void createMappingActionContinuation(ImmutableOpenMap missingPathsInIndex = MapperUtils.validateIndexMappings(indexMappings, aliasMappingsJSON); + Pair, List> validationResult = MapperUtils.validateIndexMappings(indexName, mappingMetadata, aliasMappingsJSON); + List missingPathsInIndex = validationResult.getLeft(); + List presentPathsInIndex = validationResult.getRight(); if(missingPathsInIndex.size() > 0) { // If user didn't allow partial apply, we should error out here @@ -92,25 +187,35 @@ private void createMappingActionContinuation(ImmutableOpenMap> pathsToSkip = - missingPathsInIndex.stream() - .map(e -> Pair.of(PATH, e)) - .collect(Collectors.toList()); - MappingsTraverser mappingsTraverser = new MappingsTraverser(aliasMappingsJSON, pathsToSkip); - Map filteredMappings = mappingsTraverser.traverseAndShallowCopy(); - - request = new PutMappingRequest(indexName).source(filteredMappings); - } else { - request = new PutMappingRequest(indexName).source( - aliasMappingsJSON, XContentType.JSON - ); } + // Filter out mappings of sourceIndex fields to which we're applying alias mappings + Map presentPathsMappings = MapperUtils.getFieldMappingsFlat(mappingMetadata, presentPathsInIndex); + // Filtered alias mappings -- contains only aliases which are applicable to index: + // 1. fields in path params exists in index + // 2. alias isn't named as one of existing fields in index + Map filteredAliasMappings = filterNonApplicableAliases( + mappingMetadata, + missingPathsInIndex, + aliasMappingsJSON + ); + Map allMappings = new HashMap<>(presentPathsMappings); + allMappings.putAll((Map) filteredAliasMappings.get(PROPERTIES)); + + Map mappingsRoot = new HashMap<>(); + mappingsRoot.put(PROPERTIES, allMappings); + // Apply mappings to sourceIndex + PutMappingRequest request = new PutMappingRequest(indexName).source(filteredAliasMappings); indicesClient.putMapping(request, new ActionListener<>() { @Override public void onResponse(AcknowledgedResponse acknowledgedResponse) { - actionListener.onResponse(acknowledgedResponse); + //((Map)mappingsRoot.get(PROPERTIES)).putAll(presentPathsMappings); + CreateMappingResult result = new CreateMappingResult( + acknowledgedResponse, + indexName, + mappingsRoot + ); + actionListener.onResponse(result); } @Override @@ -123,6 +228,45 @@ public void onFailure(Exception e) { } } + private Map filterNonApplicableAliases( + MappingMetadata indexMappingMetadata, + List missingPathsInIndex, + String aliasMappingsJSON + ) throws IOException { + // Parse aliasMappings JSON into Map + MappingsTraverser mappingsTraverser = new MappingsTraverser(aliasMappingsJSON, Set.of()); + Map filteredAliasMappings = mappingsTraverser.traverseAndCopyAsFlat(); + + List> propertiesToSkip = new ArrayList<>(); + if(missingPathsInIndex.size() > 0) { + // Filter out missing paths from alias mappings so that our PutMappings request succeeds + propertiesToSkip.addAll( + missingPathsInIndex.stream() + .map(e -> Pair.of(PATH, e)) + .collect(Collectors.toList()) + ); + } + // Filter out all aliases which name already exists as field in index mappings + List nonAliasIndexFields = MapperUtils.getAllNonAliasFieldsFromIndex(indexMappingMetadata); + List aliasFields = MapperUtils.getAllAliases(aliasMappingsJSON); + Set aliasesToInclude = + aliasFields.stream() + .filter(e -> nonAliasIndexFields.contains(e) == false) + .collect(Collectors.toSet()); + + boolean excludeSomeAliases = aliasesToInclude.size() < aliasFields.size(); + // check if we need to filter out some properties/nodes in alias mapping + if (propertiesToSkip.size() > 0 || excludeSomeAliases) { + mappingsTraverser = new MappingsTraverser(aliasMappingsJSON, propertiesToSkip); + if (aliasesToInclude.size() > 0) { + filteredAliasMappings = mappingsTraverser.traverseAndCopyWithFilter(aliasesToInclude); + } else { + filteredAliasMappings = mappingsTraverser.traverseAndCopyAsFlat(); + } + } + return filteredAliasMappings; + } + public void updateMappingAction(String indexName, String field, String alias, ActionListener actionListener) { PutMappingRequest request = new PutMappingRequest(indexName).source(field, alias); indicesClient.putMapping(request, new ActionListener<>() { @@ -139,16 +283,36 @@ public void onFailure(Exception e) { } public void getMappingAction(String indexName, ActionListener actionListener) { - GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices(indexName); + try { + // We are returning mappings view for only 1 index: writeIndex or latest from the pattern + resolveConcreteIndex(indexName, new ActionListener<>() { + @Override + public void onResponse(String concreteIndex) { + doGetMappingAction(indexName, concreteIndex, actionListener); + } + + @Override + public void onFailure(Exception e) { + actionListener.onFailure(e); + } + }); + + + } catch (IOException e) { + throw SecurityAnalyticsException.wrap(e); + } + } + + public void doGetMappingAction(String indexName, String concreteIndexName, ActionListener actionListener) { + GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices(concreteIndexName); indicesClient.getMappings(getMappingsRequest, new ActionListener<>() { @Override public void onResponse(GetMappingsResponse getMappingsResponse) { try { - // Extract indexName and MappingMetadata - String indexName = getMappingsResponse.mappings().iterator().next().key; + // Extract MappingMetadata MappingMetadata mappingMetadata = getMappingsResponse.mappings().iterator().next().value; // List of all found applied aliases on index - List appliedAliases = new ArrayList<>(); + Set appliedAliases = new HashSet<>(); // Get list of alias -> path pairs from index mappings List> indexAliasPathPairs = MapperUtils.getAllAliasPathPairs(mappingMetadata); @@ -168,43 +332,16 @@ public void onResponse(GetMappingsResponse getMappingsResponse) { } } } - // If we found all aliases we can stop searching further - if (indexAliasPathPairs.size() == appliedAliases.size()) { - break; - } - } - // Traverse mappings and do copy with excluded type=alias properties - MappingsTraverser mappingsTraverser = new MappingsTraverser(mappingMetadata); - // Resulting properties after filtering - Map filteredProperties = new HashMap<>(); - - mappingsTraverser.addListener(new MappingsTraverser.MappingsTraverserListener() { - @Override - public void onLeafVisited(MappingsTraverser.Node node) { - // Skip everything except aliases we found - if (appliedAliases.contains(node.currentPath) == false) { - return; - } - MappingsTraverser.Node n = node; - while (n.parent != null) { - n = n.parent; - } - if (n == null) { - n = node; - } - filteredProperties.put(n.getNodeName(), n.getProperties()); - } - @Override - public void onError(String error) { - throw new IllegalArgumentException(""); - } - }); - mappingsTraverser.traverse(); + // Traverse mappings and do copy with excluded type=alias properties + MappingsTraverser mappingsTraverser = new MappingsTraverser(mappingMetadata); + // Resulting mapping after filtering + Map filteredMapping = mappingsTraverser.traverseAndCopyWithFilter(appliedAliases); + + // Construct filtered mappings and return them as result ImmutableOpenMap.Builder outIndexMappings = ImmutableOpenMap.builder(); - Map outRootProperties = Map.of(PROPERTIES, filteredProperties); - Map root = Map.of(org.opensearch.index.mapper.MapperService.SINGLE_MAPPING_NAME, outRootProperties); + Map root = Map.of(org.opensearch.index.mapper.MapperService.SINGLE_MAPPING_NAME, filteredMapping); MappingMetadata outMappingMetadata = new MappingMetadata(org.opensearch.index.mapper.MapperService.SINGLE_MAPPING_NAME, root); outIndexMappings.put(indexName, outMappingMetadata); @@ -225,7 +362,34 @@ public void getMappingsViewAction( String mapperTopic, ActionListener actionListener ) { - GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices(indexName); + try { + // We are returning mappings view for only 1 index: writeIndex or latest from the pattern + resolveConcreteIndex(indexName, new ActionListener<>() { + @Override + public void onResponse(String concreteIndex) { + doGetMappingsView(mapperTopic, actionListener, concreteIndex); + } + + @Override + public void onFailure(Exception e) { + actionListener.onFailure(e); + } + }); + + + } catch (IOException e) { + throw SecurityAnalyticsException.wrap(e); + } + } + + /** + * Constructs Mappings View of index + * @param mapperTopic Mapper Topic describing set of alias mappings + * @param actionListener Action Listener + * @param concreteIndex Concrete Index name for which we're computing Mappings View + */ + private void doGetMappingsView(String mapperTopic, ActionListener actionListener, String concreteIndex) { + GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices(concreteIndex); indicesClient.getMappings(getMappingsRequest, new ActionListener<>() { @Override public void onResponse(GetMappingsResponse getMappingsResponse) { @@ -252,7 +416,8 @@ public void onResponse(GetMappingsResponse getMappingsResponse) { // Maintain list of found paths in index applyableAliases.add(alias); pathsOfApplyableAliases.add(path); - } else { + } else if (allFieldsFromIndex.contains(alias) == false) { + // we don't want to send back aliases which have same name as existing field in index unmappedFieldAliases.add(alias); } } @@ -278,4 +443,60 @@ public void onFailure(Exception e) { } }); } + + /** + * Given index name, resolves it to single concrete index, depending on what initial indexName is. + * In case of Datastream or Alias, WriteIndex would be returned. In case of index pattern, newest index by creation date would be returned. + * @param indexName Datastream, Alias, index patter or concrete index + * @param actionListener Action Listener + * @throws IOException + */ + private void resolveConcreteIndex(String indexName, ActionListener actionListener) throws IOException { + + indicesClient.getIndex((new GetIndexRequest()).indices(indexName), new ActionListener<>() { + @Override + public void onResponse(GetIndexResponse getIndexResponse) { + String[] indices = getIndexResponse.indices(); + if (indices.length == 0) { + actionListener.onFailure( + SecurityAnalyticsException.wrap( + new IllegalArgumentException("Invalid index name: [" + indexName + "]") + ) + ); + } else if (indices.length == 1) { + actionListener.onResponse(indices[0]); + } else if (indices.length > 1) { + String writeIndex = IndexUtils.getWriteIndex(indexName, MapperService.this.clusterService.state()); + if (writeIndex != null) { + actionListener.onResponse(writeIndex); + } else { + actionListener.onResponse( + IndexUtils.getNewestIndexByCreationDate(indices, MapperService.this.clusterService.state()) + ); + } + } + } + + @Override + public void onFailure(Exception e) { + actionListener.onFailure(e); + } + }); + + } + + void setIndicesAdminClient(IndicesAdminClient client) { + this.indicesClient = client; + } + void setClusterService(ClusterService clusterService) { + this.clusterService = clusterService; + } + + public void setIndexNameExpressionResolver(IndexNameExpressionResolver indexNameExpressionResolver) { + this.indexNameExpressionResolver = indexNameExpressionResolver; + } + + public void setIndexTemplateManager(IndexTemplateManager indexTemplateManager) { + this.indexTemplateManager = indexTemplateManager; + } } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/mapper/MapperTopicStore.java b/src/main/java/org/opensearch/securityanalytics/mapper/MapperTopicStore.java index 1b46df14d..d2f399917 100644 --- a/src/main/java/org/opensearch/securityanalytics/mapper/MapperTopicStore.java +++ b/src/main/java/org/opensearch/securityanalytics/mapper/MapperTopicStore.java @@ -9,6 +9,7 @@ import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.util.HashMap; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.stream.Collectors; @@ -54,11 +55,11 @@ private MapperTopicStore() { } public static String aliasMappings(String mapperTopic) throws IOException { - if (INSTANCE.mapperMap.containsKey(mapperTopic)) { + if (INSTANCE.mapperMap.containsKey(mapperTopic.toLowerCase(Locale.ROOT))) { return new String(Objects.requireNonNull( INSTANCE.getClass().getClassLoader().getResourceAsStream(INSTANCE. - mapperMap.get(mapperTopic))).readAllBytes(), + mapperMap.get(mapperTopic.toLowerCase(Locale.ROOT)))).readAllBytes(), StandardCharsets.UTF_8); } throw new IllegalArgumentException("Mapper not found: [" + mapperTopic + "]"); diff --git a/src/main/java/org/opensearch/securityanalytics/mapper/MapperUtils.java b/src/main/java/org/opensearch/securityanalytics/mapper/MapperUtils.java index 421693a9a..857a144a9 100644 --- a/src/main/java/org/opensearch/securityanalytics/mapper/MapperUtils.java +++ b/src/main/java/org/opensearch/securityanalytics/mapper/MapperUtils.java @@ -5,17 +5,17 @@ package org.opensearch.securityanalytics.mapper; -import java.util.HashMap; -import java.util.Map; import org.apache.commons.lang3.tuple.Pair; import org.opensearch.cluster.metadata.MappingMetadata; -import org.opensearch.common.collect.ImmutableOpenMap; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; import java.io.IOException; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Locale; +import java.util.Map; import java.util.Set; -import java.util.stream.Collectors; public class MapperUtils { @@ -25,6 +25,29 @@ public class MapperUtils { public static final String ALIAS = "alias"; public static final String NESTED = "nested"; + public static List getAllAliases(String aliasMappingsJson) throws IOException { + MappingsTraverser mappingsTraverser = new MappingsTraverser(aliasMappingsJson, Set.of()); + List aliasFields = new ArrayList<>(); + mappingsTraverser.addListener(new MappingsTraverser.MappingsTraverserListener() { + @Override + public void onLeafVisited(MappingsTraverser.Node node) { + // We'll ignore any irregularities in alias mappings here + if (node.getProperties().containsKey(PATH) == false || + node.getProperties().get(TYPE).equals(ALIAS) == false) { + return; + } + aliasFields.add(node.currentPath); + } + + @Override + public void onError(String error) { + throw new IllegalArgumentException(error); + } + }); + mappingsTraverser.traverse(); + return aliasFields; + } + public static List> getAllAliasPathPairs(String aliasMappingsJson) throws IOException { MappingsTraverser mappingsTraverser = new MappingsTraverser(aliasMappingsJson, Set.of()); return getAllAliasPathPairs(mappingsTraverser); @@ -89,29 +112,75 @@ public void onError(String error) { *
  • Alias mappings have to have property type=alias and path property has to exist *
  • Paths from alias mappings should exists in index mappings * - * @param indexMappings Index Mappings to which alias mappings will be applied - * @param aliasMappingsJSON Alias Mappings as JSON string - * @return list of alias mappings paths which are missing in index mappings + * @param indexName Source index name + * @param mappingMetadata Source index mapping to which alias mappings will be applied + * @param aliasMappingsJSON Alias mappings as JSON string + * @return Pair of list of alias mappings paths which are missing in index mappings and list of * */ - public static List validateIndexMappings(ImmutableOpenMap indexMappings, String aliasMappingsJSON) throws IOException { - + public static Pair, List> validateIndexMappings(String indexName, MappingMetadata mappingMetadata, String aliasMappingsJSON) throws IOException { // Check if index's mapping is empty - if (isIndexMappingsEmpty(indexMappings)) { - throw new IllegalArgumentException("Index mappings are empty"); + if (isIndexMappingsEmpty(mappingMetadata)) { + throw new IllegalArgumentException(String.format(Locale.ROOT, "Mappings for index [%s] are empty", indexName)); } // Get all paths (field names) to which we're going to apply aliases List paths = getAllPathsFromAliasMappings(aliasMappingsJSON); // Traverse Index Mappings and extract all fields(paths) - String indexName = indexMappings.iterator().next().key; - MappingMetadata mappingMetadata = indexMappings.get(indexName); - List flatFields = getAllNonAliasFieldsFromIndex(mappingMetadata); // Return list of paths from Alias Mappings which are missing in Index Mappings - return paths.stream() - .filter(e -> !flatFields.contains(e)) - .collect(Collectors.toList()); + List missingPaths = new ArrayList<>(); + List presentPaths = new ArrayList<>(); + paths.stream().forEach(e -> { + if (flatFields.contains(e)) presentPaths.add(e); + else missingPaths.add(e); + }); + return Pair.of(missingPaths, presentPaths); + } + + /** + * Traverses mappings tree and collects all fields. + * Nested fields are flattened. + * @return list of fields in mappings. + */ + public static List extractAllFieldsFlat(MappingMetadata mappingMetadata) { + MappingsTraverser mappingsTraverser = new MappingsTraverser(mappingMetadata); + List flatProperties = new ArrayList<>(); + // Setup + mappingsTraverser.addListener(new MappingsTraverser.MappingsTraverserListener() { + @Override + public void onLeafVisited(MappingsTraverser.Node node) { + flatProperties.add(node.currentPath); + } + + @Override + public void onError(String error) { + throw new IllegalArgumentException(error); + } + }); + // Do traverse + mappingsTraverser.traverse(); + return flatProperties; + } + + public static List extractAllFieldsFlat(Map mappingsMap) { + MappingsTraverser mappingsTraverser = new MappingsTraverser(mappingsMap, Set.of()); + List flatProperties = new ArrayList<>(); + // Setup + mappingsTraverser.addListener(new MappingsTraverser.MappingsTraverserListener() { + @Override + public void onLeafVisited(MappingsTraverser.Node node) { + flatProperties.add(node.currentPath); + } + + @Override + public void onError(String error) { + throw new IllegalArgumentException(error); + } + }); + // Do traverse + mappingsTraverser.traverse(); + return flatProperties; } public static List getAllNonAliasFieldsFromIndex(MappingMetadata mappingMetadata) { @@ -119,11 +188,8 @@ public static List getAllNonAliasFieldsFromIndex(MappingMetadata mapping return mappingsTraverser.extractFlatNonAliasFields(); } - public static boolean isIndexMappingsEmpty(ImmutableOpenMap indexMappings) { - if (indexMappings.iterator().hasNext()) { - return indexMappings.iterator().next().value.getSourceAsMap().size() == 0; - } - throw new IllegalArgumentException("Invalid Index Mappings"); + public static boolean isIndexMappingsEmpty(MappingMetadata mappingMetadata) { + return mappingMetadata.getSourceAsMap().size() == 0; } public static Map getAliasMappingsWithFilter( @@ -161,4 +227,27 @@ public void onError(String error) { // Construct filtered mappings with PROPERTIES as root and return them as result return Map.of(PROPERTIES, filteredProperties); } + + public static Map getFieldMappingsFlat(MappingMetadata mappingMetadata, List fieldPaths) { + Map presentPathsMappings = new HashMap<>(); + MappingsTraverser mappingsTraverser = new MappingsTraverser(mappingMetadata); + mappingsTraverser.addListener(new MappingsTraverser.MappingsTraverserListener() { + @Override + public void onLeafVisited(MappingsTraverser.Node node) { + if (fieldPaths.contains(node.currentPath)) { + presentPathsMappings.put(node.currentPath, node.getProperties()); + } + } + + @Override + public void onError(String error) { + throw SecurityAnalyticsException.wrap( + new IllegalArgumentException("Failed traversing index mappings: [" + error + "]") + ); + } + }); + mappingsTraverser.traverse(); + + return presentPathsMappings; + } } diff --git a/src/main/java/org/opensearch/securityanalytics/mapper/MappingsTraverser.java b/src/main/java/org/opensearch/securityanalytics/mapper/MappingsTraverser.java index 26b6aaecf..67a9f0e67 100644 --- a/src/main/java/org/opensearch/securityanalytics/mapper/MappingsTraverser.java +++ b/src/main/java/org/opensearch/securityanalytics/mapper/MappingsTraverser.java @@ -5,20 +5,23 @@ package org.opensearch.securityanalytics.mapper; +import java.util.LinkedHashMap; +import java.util.ListIterator; import org.apache.commons.lang3.tuple.Pair; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.opensearch.cluster.metadata.MappingMetadata; -import org.opensearch.common.xcontent.DeprecationHandler; -import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.index.mapper.MapperService; -import org.opensearch.securityanalytics.rules.condition.ConditionListener; +import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.HashSet; import java.util.HashMap; @@ -36,6 +39,8 @@ */ public class MappingsTraverser { + private static final Logger log = LogManager.getLogger(MappingsTraverser.class); + /** * Traverser listener used to process leaves */ @@ -120,14 +125,6 @@ public void addListener(MappingsTraverserListener l) { this.mappingsTraverserListeners.add(l); } - /** - * Sets set of property "type" values to skip during traversal. - * @param types Set of strings representing property "type" - */ - public void setTypesToSkip(Set types) { - this.typesToSkip = types; - } - /** * Traverses mappings tree and collects all fields that are not of type "alias". * Nested fields are flattened. @@ -136,7 +133,7 @@ public void setTypesToSkip(Set types) { public List extractFlatNonAliasFields() { List flatProperties = new ArrayList<>(); // Setup - this.typesToSkip.add(ALIAS); + this.propertiesToSkip.add(Pair.of(TYPE, ALIAS)); this.mappingsTraverserListeners.add(new MappingsTraverserListener() { @Override public void onLeafVisited(Node node) { @@ -162,7 +159,10 @@ public void traverse() { try { Map rootProperties = (Map) this.mappingsMap.get(PROPERTIES); - rootProperties.forEach((k, v) -> nodeStack.push(new Node(Map.of(k, v), ""))); + + if (Objects.nonNull(rootProperties)) { + rootProperties.forEach((k, v) -> nodeStack.push(new Node(Map.of(k, v), null, rootProperties, "", ""))); + } while (nodeStack.size() > 0) { Node node = nodeStack.pop(); @@ -190,7 +190,7 @@ public void traverse() { node.currentPath.length() > 0 ? node.currentPath + "." + currentNodeName : currentNodeName; - nodeStack.push(new Node(Map.of(k, v), node, currentPath)); + nodeStack.push(new Node(Map.of(k, v), node, children, currentNodeName, currentPath)); }); } } @@ -198,6 +198,7 @@ public void traverse() { // This is coming from listeners. throw e; } catch (Exception e) { + log.error("Error traversing mappings tree", e); notifyError("Error traversing mappings tree"); } } @@ -219,27 +220,84 @@ private boolean shouldSkipNode(Map properties) { return false; } + public Map traverseAndCopyWithFilter(Set nodePathsToCopy) { + + Map outRoot = new LinkedHashMap<>(Map.of(PROPERTIES, new LinkedHashMap())); + this.addListener(new MappingsTraverserListener() { + @Override + public void onLeafVisited(Node node) { + if (nodePathsToCopy.contains(node.currentPath) == false) { + return; + } + // Collect all nodes from root to this leaf. + List nodes = new ArrayList<>(); + Node n = node; + nodes.add(n); + while (n.parent != null) { + n = n.parent; + nodes.add(n); + } + // Iterate from root node up to this leaf and copy node in each iteration to "out" tree + ListIterator nodesIterator = nodes.listIterator(nodes.size()); + Map outNode = outRoot; + while (nodesIterator.hasPrevious()) { + Node currentNode = nodesIterator.previous(); + + appendNode(currentNode, outNode, !nodesIterator.hasPrevious()); + // Move to next output node + outNode = (Map) ((Map) outNode.get(PROPERTIES)).get(currentNode.getNodeName()); + } + } + + @Override + public void onError(String error) { + throw new IllegalArgumentException(""); + } + }); + traverse(); + return outRoot; + } + + /** + * Appends src node to dst node's properties + * @param srcNode source node + * @param dstNode destination node where source node is appended + * @param isSourceLeaf flag which indicated if source node is leaf + */ + private void appendNode(Node srcNode, Map dstNode, boolean isSourceLeaf) { + Map existingProps = (Map) ((Map) dstNode.get(PROPERTIES)).get(srcNode.getNodeName()); + if (existingProps == null) { + Map srcNodeProps = srcNode.getProperties(); + Map newProps = isSourceLeaf ? + srcNodeProps : + new LinkedHashMap(); + // In case of type="nested" node, we need to copy that type field too, beside properties + if (srcNodeProps.containsKey(TYPE) && srcNodeProps.get(TYPE).equals(NESTED)) { + ((Map) dstNode.get(PROPERTIES)).put(srcNode.getNodeName(), new LinkedHashMap(Map.of(PROPERTIES, newProps, TYPE, NESTED))); + } else { + // Append src node to dst node's properties + ((Map) dstNode.get(PROPERTIES)).put( + srcNode.getNodeName(), + isSourceLeaf ? newProps : new LinkedHashMap(Map.of(PROPERTIES, newProps)) + ); + } + } + } + /** - * Traverses index mappings tree and (shallow) copies it. Listeners are notified when leaves are visited, + * Traverses index mappings tree and copies it into 1-level tree with flatten nodes. (level1.level2.level3) Listeners are notified when leaves are visited, * just like during {@link #traverse()} call. * Nodes which should be skipped({@link MappingsTraverser#propertiesToSkip}) will not be copied to a new tree * @return Copied tree * */ - public Map traverseAndShallowCopy() { + public Map traverseAndCopyAsFlat() { Map properties = new HashMap<>(); this.addListener(new MappingsTraverserListener() { @Override public void onLeafVisited(Node node) { - Node n = node; - while (n.parent != null) { - n = n.parent; - } - if (n == null) { - n = node; - } - properties.put(n.getNodeName(), n.getProperties()); + properties.put(node.currentPath, node.getProperties()); } @Override @@ -269,10 +327,16 @@ private void notifyLeafVisited(Node node) { ); } + public Map getMappingsMap() { + return mappingsMap; + } + static class Node { Map node; Node parent; Map properties; + Map parentProperties; + String parentKey; String currentPath; String name; @@ -280,9 +344,10 @@ public Node(Map node, String currentPath) { this.node = node; this.currentPath = currentPath; } - public Node(Map node, Node parent, String currentPath) { + public Node(Map node, Node parent, Map parentProperties, String parentKey, String currentPath) { this.node = node; this.parent = parent; + this.parentProperties = parentProperties; this.currentPath = currentPath; } /** diff --git a/src/main/java/org/opensearch/securityanalytics/model/CorrelatedFinding.java b/src/main/java/org/opensearch/securityanalytics/model/CorrelatedFinding.java new file mode 100644 index 000000000..d5f68339b --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/model/CorrelatedFinding.java @@ -0,0 +1,116 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.model; + +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.io.stream.Writeable; +import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class CorrelatedFinding implements Writeable, ToXContentObject { + + private String finding1; + + private String logType1; + + private String finding2; + + private String logType2; + + private List correlationRules; + + protected static final String FINDING1_FIELD = "finding1"; + protected static final String LOGTYPE1_FIELD = "logType1"; + protected static final String FINDING2_FIELD = "finding2"; + protected static final String LOGTYPE2_FIELD = "logType2"; + protected static final String RULES_FIELD = "rules"; + + public CorrelatedFinding(String finding1, String logType1, String finding2, String logType2, List correlationRules) { + this.finding1 = finding1; + this.logType1 = logType1; + this.finding2 = finding2; + this.logType2 = logType2; + this.correlationRules = correlationRules; + } + + public CorrelatedFinding(StreamInput sin) throws IOException { + this( + sin.readString(), + sin.readString(), + sin.readString(), + sin.readString(), + sin.readStringList() + ); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(finding1); + out.writeString(logType1); + out.writeString(finding2); + out.writeString(logType2); + out.writeStringCollection(correlationRules); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject() + .field(FINDING1_FIELD, finding1) + .field(LOGTYPE1_FIELD, logType1) + .field(FINDING2_FIELD, finding2) + .field(LOGTYPE2_FIELD, logType2) + .field(RULES_FIELD, correlationRules); + return builder.endObject(); + } + + public static CorrelatedFinding parse(XContentParser xcp) throws IOException { + String finding1 = null; + String logType1 = null; + String finding2 = null; + String logType2 = null; + List correlationRules = new ArrayList<>(); + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp); + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + String fieldName = xcp.currentName(); + xcp.nextToken(); + + switch (fieldName) { + case FINDING1_FIELD: + finding1 = xcp.text(); + break; + case LOGTYPE1_FIELD: + logType1 = xcp.text(); + break; + case FINDING2_FIELD: + finding2 = xcp.text(); + break; + case LOGTYPE2_FIELD: + logType2 = xcp.text(); + break; + case RULES_FIELD: + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp); + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + correlationRules.add(xcp.text()); + } + break; + default: + xcp.skipChildren(); + } + } + return new CorrelatedFinding(finding1, logType1, finding2, logType2, correlationRules); + } + + public static CorrelatedFinding readFrom(StreamInput sin) throws IOException { + return new CorrelatedFinding(sin); + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/model/CorrelationQuery.java b/src/main/java/org/opensearch/securityanalytics/model/CorrelationQuery.java new file mode 100644 index 000000000..d2940405c --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/model/CorrelationQuery.java @@ -0,0 +1,97 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.model; + +import java.io.IOException; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.io.stream.Writeable; +import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; + +public class CorrelationQuery implements Writeable, ToXContentObject { + + private static final Logger log = LogManager.getLogger(CorrelationQuery.class); + private static final String INDEX = "index"; + private static final String QUERY = "query"; + private static final String CATEGORY = "category"; + + private String index; + + private String query; + + private String category; + + public CorrelationQuery(String index, String query, String category) { + this.index = index; + this.query = query; + this.category = category; + } + + public CorrelationQuery(StreamInput sin) throws IOException { + this(sin.readString(), sin.readString(), sin.readString()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(index); + out.writeString(query); + out.writeString(category); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(INDEX, index).field(QUERY, query).field(CATEGORY, category); + return builder.endObject(); + } + + public static CorrelationQuery parse(XContentParser xcp) throws IOException { + String index = null; + String query = null; + String category = null; + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp); + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + String fieldName = xcp.currentName(); + xcp.nextToken(); + + switch (fieldName) { + case INDEX: + index = xcp.text(); + break; + case QUERY: + query = xcp.text(); + break; + case CATEGORY: + category = xcp.text(); + break; + default: + xcp.skipChildren(); + } + } + return new CorrelationQuery(index, query, category); + } + + public static CorrelationQuery readFrom(StreamInput sin) throws IOException { + return new CorrelationQuery(sin); + } + + public String getIndex() { + return index; + } + + public String getQuery() { + return query; + } + + public String getCategory() { + return category; + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/model/CorrelationRule.java b/src/main/java/org/opensearch/securityanalytics/model/CorrelationRule.java new file mode 100644 index 000000000..f29213e14 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/model/CorrelationRule.java @@ -0,0 +1,155 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.model; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.io.stream.Writeable; +import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; + +public class CorrelationRule implements Writeable, ToXContentObject { + + private static final Logger log = LogManager.getLogger(CorrelationRule.class); + + public static final String CORRELATION_RULE_INDEX = ".opensearch-sap-correlation-rules-config"; + + private static final String NAME = "name"; + public static final String NO_ID = ""; + public static final Long NO_VERSION = 1L; + private static final String CORRELATION_QUERIES = "correlate"; + + private String id; + + private Long version; + + private String name; + + private List correlationQueries; + + public CorrelationRule(String id, Long version, String name, List correlationQueries) { + this.id = id != null ? id : NO_ID; + this.version = version != null ? version : NO_VERSION; + this.name = name; + this.correlationQueries = correlationQueries; + } + + public CorrelationRule(StreamInput sin) throws IOException { + this(sin.readString(), sin.readLong(), sin.readString(), sin.readList(CorrelationQuery::readFrom)); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.field(NAME, name); + + CorrelationQuery[] correlationQueries = new CorrelationQuery[] {}; + correlationQueries = this.correlationQueries.toArray(correlationQueries); + builder.field(CORRELATION_QUERIES, correlationQueries); + return builder.endObject(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(id); + out.writeLong(version); + out.writeString(name); + + for (CorrelationQuery query : correlationQueries) { + query.writeTo(out); + } + } + + public static CorrelationRule parse(XContentParser xcp, String id, Long version) throws IOException { + if (id == null) { + id = NO_ID; + } + if (version == null) { + version = NO_VERSION; + } + + String name = null; + List correlationQueries = new ArrayList<>(); + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp); + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + String fieldName = xcp.currentName(); + xcp.nextToken(); + + switch (fieldName) { + case NAME: + name = xcp.text(); + break; + case CORRELATION_QUERIES: + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp); + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + CorrelationQuery query = CorrelationQuery.parse(xcp); + correlationQueries.add(query); + } + break; + default: + xcp.skipChildren(); + } + } + return new CorrelationRule(id, version, name, correlationQueries); + } + + public static CorrelationRule readFrom(StreamInput sin) throws IOException { + return new CorrelationRule(sin); + } + + public void setId(String id) { + this.id = id; + } + + public String getId() { + return id; + } + + public void setVersion(Long version) { + this.version = version; + } + + public Long getVersion() { + return version; + } + + public void setName(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public List getCorrelationQueries() { + return correlationQueries; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CorrelationRule that = (CorrelationRule) o; + return id.equals(that.id) + && version.equals(that.version) + && name.equals(that.name) + && correlationQueries.equals(that.correlationQueries); + } + + @Override + public int hashCode() { + return Objects.hash(id, version, name, correlationQueries); + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/model/CreateMappingResult.java b/src/main/java/org/opensearch/securityanalytics/model/CreateMappingResult.java new file mode 100644 index 000000000..86eab3b8c --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/model/CreateMappingResult.java @@ -0,0 +1,49 @@ +/* +Copyright OpenSearch Contributors +SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.model; + +import java.util.Map; +import org.opensearch.action.support.master.AcknowledgedResponse; + +public class CreateMappingResult { + + private AcknowledgedResponse acknowledgedResponse; + private String concreteIndexName; + private Map mappings; + + public CreateMappingResult() {} + + public CreateMappingResult(AcknowledgedResponse acknowledgedResponse, String concreteIndexName, Map mappingsSource) { + this.acknowledgedResponse = acknowledgedResponse; + this.concreteIndexName = concreteIndexName; + this.mappings = mappingsSource; + } + + public AcknowledgedResponse getAcknowledgedResponse() { + return acknowledgedResponse; + } + + public void setAcknowledgedResponse(AcknowledgedResponse acknowledgedResponse) { + this.acknowledgedResponse = acknowledgedResponse; + } + + public String getConcreteIndexName() { + return concreteIndexName; + } + + public void setConcreteIndexName(String concreteIndexName) { + this.concreteIndexName = concreteIndexName; + } + + public Map getMappings() { + return mappings; + } + + public void setMappings(Map mappings) { + this.mappings = this.mappings; + } + +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/model/Detector.java b/src/main/java/org/opensearch/securityanalytics/model/Detector.java index ecf655f78..694d58b2c 100644 --- a/src/main/java/org/opensearch/securityanalytics/model/Detector.java +++ b/src/main/java/org/opensearch/securityanalytics/model/Detector.java @@ -4,22 +4,23 @@ */ package org.opensearch.securityanalytics.model; +import java.util.HashMap; +import java.util.Map; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.common.ParseField; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; -import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentParserUtils; import org.opensearch.commons.alerting.model.CronSchedule; import org.opensearch.commons.alerting.model.Schedule; import org.opensearch.commons.authuser.User; - +import org.opensearch.core.ParseField; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; import java.io.IOException; import java.time.Instant; import java.util.ArrayList; @@ -49,6 +50,8 @@ public class Detector implements Writeable, ToXContentObject { public static final String LAST_UPDATE_TIME_FIELD = "last_update_time"; public static final String ENABLED_TIME_FIELD = "enabled_time"; public static final String ALERTING_MONITOR_ID = "monitor_id"; + + public static final String BUCKET_MONITOR_ID_RULE_ID = "bucket_monitor_id_rule_id"; private static final String RULE_TOPIC_INDEX = "rule_topic_index"; private static final String ALERTS_INDEX = "alert_index"; @@ -57,7 +60,10 @@ public class Detector implements Writeable, ToXContentObject { private static final String FINDINGS_INDEX = "findings_index"; private static final String FINDINGS_INDEX_PATTERN = "findings_index_pattern"; - public static final String DETECTORS_INDEX = ".opensearch-detectors-config"; + public static final String DETECTORS_INDEX = ".opensearch-sap-detectors-config"; + + // Used as a key in rule-monitor map for the purpose of easy detection of the doc level monitor + public static final String DOC_LEVEL_MONITOR = "-1"; public static final NamedXContentRegistry.Entry XCONTENT_REGISTRY = new NamedXContentRegistry.Entry( Detector.class, @@ -90,6 +96,8 @@ public class Detector implements Writeable, ToXContentObject { private List monitorIds; + private Map ruleIdMonitorIdMap; + private String ruleIndex; private String alertsIndex; @@ -108,7 +116,7 @@ public Detector(String id, Long version, String name, Boolean enabled, Schedule Instant lastUpdateTime, Instant enabledTime, DetectorType detectorType, User user, List inputs, List triggers, List monitorIds, String ruleIndex, String alertsIndex, String alertsHistoryIndex, String alertsHistoryIndexPattern, - String findingsIndex, String findingsIndexPattern) { + String findingsIndex, String findingsIndexPattern, Map rulePerMonitor) { this.type = DETECTOR_TYPE; this.id = id != null ? id : NO_ID; @@ -129,6 +137,7 @@ public Detector(String id, Long version, String name, Boolean enabled, Schedule this.alertsHistoryIndexPattern = alertsHistoryIndexPattern; this.findingsIndex = findingsIndex; this.findingsIndexPattern = findingsIndexPattern; + this.ruleIdMonitorIdMap = rulePerMonitor; if (enabled) { Objects.requireNonNull(enabledTime); @@ -149,12 +158,16 @@ public Detector(StreamInput sin) throws IOException { sin.readList(DetectorInput::readFrom), sin.readList(DetectorTrigger::readFrom), sin.readStringList(), - sin.readString(), - sin.readString(), - sin.readString(), - sin.readString(), - sin.readString(), - sin.readString()); + sin.readOptionalString(), + sin.readOptionalString(), + sin.readOptionalString(), + sin.readOptionalString(), + sin.readOptionalString(), + sin.readOptionalString(), + sin.readMap(StreamInput::readString, StreamInput::readString), + sin.readStringList(), + sin.readBoolean() + ); } @Override @@ -185,7 +198,13 @@ public void writeTo(StreamOutput out) throws IOException { it.writeTo(out); } out.writeStringCollection(monitorIds); - out.writeString(ruleIndex); + out.writeOptionalString(ruleIndex); + out.writeOptionalString(alertsIndex); + out.writeOptionalString(alertsHistoryIndex); + out.writeOptionalString(alertsHistoryIndexPattern); + out.writeOptionalString(findingsIndex); + out.writeOptionalString(findingsIndexPattern); + out.writeMap(ruleIdMonitorIdMap, StreamOutput::writeString, StreamOutput::writeString); } public XContentBuilder toXContentWithUser(XContentBuilder builder, Params params) throws IOException { @@ -198,32 +217,43 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } public enum DetectorType { - OTHERS_APPLICATION("others_application"), - OTHERS_APT("others_apt"), - OTHERS_CLOUD("others_cloud"), - OTHERS_COMPLIANCE("others_compliance"), - LINUX("linux"), - OTHERS_MACOS("others_macos"), - NETWORK("network"), - OTHERS_PROXY("others_proxy"), - OTHERS_WEB("others_web"), - WINDOWS("windows"), - AD_LDAP("ad_ldap"), - APACHE_ACCESS("apache_access"), - CLOUDTRAIL("cloudtrail"), - DNS("dns"), - S3("s3"); + OTHERS_APPLICATION("others_application", 0), + OTHERS_APT("others_apt", 1), + OTHERS_CLOUD("others_cloud", 2), + OTHERS_COMPLIANCE("others_compliance", 4), + LINUX("linux", 5), + OTHERS_MACOS("others_macos", 6), + NETWORK("network", 7), + OTHERS_PROXY("others_proxy", 8), + OTHERS_WEB("others_web", 9), + WINDOWS("windows", 10), + AD_LDAP("ad_ldap", 11), + APACHE_ACCESS("apache_access", 12), + CLOUDTRAIL("cloudtrail", 14), + DNS("dns", 15), + GITHUB("github", 16), + M365("m365", 17), + GWORKSPACE("gworkspace", 18), + OKTA("okta", 19), + AZURE("azure", 20), + S3("s3", 21), + TEST_WINDOWS("test_windows", 22); private String type; + private int dim; - DetectorType(String type) { + DetectorType(String type, int dim) { this.type = type; + this.dim = dim; } public String getDetectorType() { return type; } + public int getDim() { + return dim; + } } private XContentBuilder createXContentBuilder(XContentBuilder builder, ToXContent.Params params, Boolean secure) throws IOException { @@ -233,7 +263,7 @@ private XContentBuilder createXContentBuilder(XContentBuilder builder, ToXConten } builder.field(TYPE_FIELD, type) .field(NAME_FIELD, name) - .field(DETECTOR_TYPE_FIELD, detectorType); + .field(DETECTOR_TYPE_FIELD, detectorType.getDetectorType()); if (!secure) { if (user == null) { @@ -268,6 +298,7 @@ private XContentBuilder createXContentBuilder(XContentBuilder builder, ToXConten } builder.field(ALERTING_MONITOR_ID, monitorIds); + builder.field(BUCKET_MONITOR_ID_RULE_ID, ruleIdMonitorIdMap); builder.field(RULE_TOPIC_INDEX, ruleIndex); builder.field(ALERTS_INDEX, alertsIndex); builder.field(ALERTS_HISTORY_INDEX, alertsHistoryIndex); @@ -312,6 +343,8 @@ public static Detector parse(XContentParser xcp, String id, Long version) throws List inputs = new ArrayList<>(); List triggers = new ArrayList<>(); List monitorIds = new ArrayList<>(); + Map rulePerMonitor = new HashMap<>(); + String ruleIndex = null; String alertsIndex = null; String alertsHistoryIndex = null; @@ -390,6 +423,9 @@ public static Detector parse(XContentParser xcp, String id, Long version) throws monitorIds.add(monitorId); } break; + case BUCKET_MONITOR_ID_RULE_ID: + rulePerMonitor= xcp.mapStrings(); + break; case RULE_TOPIC_INDEX: ruleIndex = xcp.text(); break; @@ -437,7 +473,8 @@ public static Detector parse(XContentParser xcp, String id, Long version) throws alertsHistoryIndex, alertsHistoryIndexPattern, findingsIndex, - findingsIndexPattern); + findingsIndexPattern, + rulePerMonitor); } public static Detector readFrom(StreamInput sin) throws IOException { @@ -516,6 +553,12 @@ public List getMonitorIds() { return monitorIds; } + public void setUser(User user) { + this.user = user; + } + + public Map getRuleIdMonitorIdMap() {return ruleIdMonitorIdMap; } + public void setId(String id) { this.id = id; } @@ -563,6 +606,13 @@ public void setInputs(List inputs) { public void setMonitorIds(List monitorIds) { this.monitorIds = monitorIds; } + public void setRuleIdMonitorIdMap(Map ruleIdMonitorIdMap) { + this.ruleIdMonitorIdMap = ruleIdMonitorIdMap; + } + + public String getDocLevelMonitorId() { + return ruleIdMonitorIdMap.get(DOC_LEVEL_MONITOR); + } @Override public boolean equals(Object o) { diff --git a/src/main/java/org/opensearch/securityanalytics/model/DetectorInput.java b/src/main/java/org/opensearch/securityanalytics/model/DetectorInput.java index 0f7b7ef10..da7d65cb2 100644 --- a/src/main/java/org/opensearch/securityanalytics/model/DetectorInput.java +++ b/src/main/java/org/opensearch/securityanalytics/model/DetectorInput.java @@ -4,15 +4,15 @@ */ package org.opensearch.securityanalytics.model; -import org.opensearch.common.ParseField; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; -import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.ParseField; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; diff --git a/src/main/java/org/opensearch/securityanalytics/model/DetectorRule.java b/src/main/java/org/opensearch/securityanalytics/model/DetectorRule.java index 5a1b2ed7a..c002687d9 100644 --- a/src/main/java/org/opensearch/securityanalytics/model/DetectorRule.java +++ b/src/main/java/org/opensearch/securityanalytics/model/DetectorRule.java @@ -7,10 +7,10 @@ import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; diff --git a/src/main/java/org/opensearch/securityanalytics/model/DetectorTrigger.java b/src/main/java/org/opensearch/securityanalytics/model/DetectorTrigger.java index f1309d570..3291c07ae 100644 --- a/src/main/java/org/opensearch/securityanalytics/model/DetectorTrigger.java +++ b/src/main/java/org/opensearch/securityanalytics/model/DetectorTrigger.java @@ -6,17 +6,18 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.common.ParseField; + import org.opensearch.common.UUIDs; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; -import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentParserUtils; import org.opensearch.commons.alerting.model.action.Action; +import org.opensearch.core.ParseField; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.script.Script; import org.opensearch.script.ScriptType; @@ -27,6 +28,7 @@ import java.util.Locale; import java.util.Map; import java.util.Objects; +import java.util.stream.Collectors; public class DetectorTrigger implements Writeable, ToXContentObject { @@ -66,7 +68,9 @@ public DetectorTrigger(String id, String name, String severity, List rul this.id = id == null? UUIDs.base64UUID(): id; this.name = name; this.severity = severity; - this.ruleTypes = ruleTypes; + this.ruleTypes = ruleTypes.stream() + .map( e -> e.toLowerCase(Locale.ROOT)) + .collect(Collectors.toList()); this.ruleIds = ruleIds; this.ruleSeverityLevels = ruleSeverityLevels; this.tags = tags; diff --git a/src/main/java/org/opensearch/securityanalytics/model/FindingWithScore.java b/src/main/java/org/opensearch/securityanalytics/model/FindingWithScore.java new file mode 100644 index 000000000..2177d076e --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/model/FindingWithScore.java @@ -0,0 +1,106 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.model; + +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.io.stream.Writeable; +import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class FindingWithScore implements Writeable, ToXContentObject { + + protected static final String FINDING = "finding"; + protected static final String DETECTOR_TYPE = "detector_type"; + protected static final String SCORE = "score"; + protected static final String RULES = "rules"; + + private String finding; + + private String detectorType; + + private Double score; + + private List rules; + + public FindingWithScore(String finding, String detectorType, Double score, List rules) { + this.finding = finding; + this.detectorType = detectorType; + this.score = score; + this.rules = rules; + } + + public FindingWithScore(StreamInput sin) throws IOException { + this( + sin.readString(), + sin.readString(), + sin.readDouble(), + sin.readStringList() + ); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(finding); + out.writeString(detectorType); + out.writeDouble(score); + out.writeStringCollection(rules); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject() + .field(FINDING, finding) + .field(DETECTOR_TYPE, detectorType) + .field(SCORE, score) + .field(RULES, rules) + .endObject(); + return builder; + } + + public static FindingWithScore parse(XContentParser xcp) throws IOException { + String finding = null; + String detectorType = null; + Double score = null; + List rules = new ArrayList<>(); + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp); + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + String fieldName = xcp.currentName(); + xcp.nextToken(); + + switch (fieldName) { + case FINDING: + finding = xcp.text(); + break; + case DETECTOR_TYPE: + detectorType = xcp.text(); + break; + case SCORE: + score = xcp.doubleValue(); + break; + case RULES: + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp); + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + rules.add(xcp.text()); + } + break; + default: + xcp.skipChildren(); + } + } + return new FindingWithScore(finding, detectorType, score, rules); + } + + public static FindingWithScore readFrom(StreamInput sin) throws IOException { + return new FindingWithScore(sin); + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/model/Rule.java b/src/main/java/org/opensearch/securityanalytics/model/Rule.java index d5c90d02e..810492229 100644 --- a/src/main/java/org/opensearch/securityanalytics/model/Rule.java +++ b/src/main/java/org/opensearch/securityanalytics/model/Rule.java @@ -4,18 +4,24 @@ */ package org.opensearch.securityanalytics.model; +import org.apache.commons.lang3.tuple.Pair; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.common.ParseField; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; -import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.ParseField; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.securityanalytics.rules.aggregation.AggregationItem; +import org.opensearch.securityanalytics.rules.backend.OSQueryBackend.AggregationQueries; +import org.opensearch.securityanalytics.rules.condition.ConditionItem; +import org.opensearch.securityanalytics.rules.exceptions.SigmaError; +import org.opensearch.securityanalytics.rules.objects.SigmaCondition; import org.opensearch.securityanalytics.rules.objects.SigmaRule; import java.io.IOException; @@ -50,10 +56,13 @@ public class Rule implements Writeable, ToXContentObject { public static final String STATUS = "status"; private static final String QUERIES = "queries"; + public static final String QUERY_FIELD_NAMES = "query_field_names"; + public static final String RULE = "rule"; - public static final String PRE_PACKAGED_RULES_INDEX = ".opensearch-pre-packaged-rules-config"; - public static final String CUSTOM_RULES_INDEX = ".opensearch-custom-rules-config"; + public static final String PRE_PACKAGED_RULES_INDEX = ".opensearch-sap-pre-packaged-rules-config"; + public static final String CUSTOM_RULES_INDEX = ".opensearch-sap-custom-rules-config"; + public static final String AGGREGATION_QUERIES = "aggregationQueries"; public static final NamedXContentRegistry.Entry XCONTENT_REGISTRY = new NamedXContentRegistry.Entry( Rule.class, @@ -89,12 +98,16 @@ public class Rule implements Writeable, ToXContentObject { private List queries; + private List queryFieldNames; + private String rule; + private List aggregationQueries; + public Rule(String id, Long version, String title, String category, String logSource, String description, List references, List tags, String level, List falsePositives, String author, String status, Instant date, - List queries, String rule) { + List queries, List queryFieldNames, String rule, List aggregationQueries) { this.id = id != null? id: NO_ID; this.version = version != null? version: NO_VERSION; @@ -115,11 +128,13 @@ public Rule(String id, Long version, String title, String category, String logSo this.date = date; this.queries = queries; + this.queryFieldNames = queryFieldNames; this.rule = rule; + this.aggregationQueries = aggregationQueries; } public Rule(String id, Long version, SigmaRule rule, String category, - List queries, String original) { + List queries, List queryFieldNames, String original) { this( id, version, @@ -136,8 +151,11 @@ public Rule(String id, Long version, SigmaRule rule, String category, rule.getAuthor(), rule.getStatus().toString(), Instant.ofEpochMilli(rule.getDate().getTime()), - queries.stream().map(Value::new).collect(Collectors.toList()), - original); + queries.stream().filter(query -> !(query instanceof AggregationQueries)).map(query -> new Value(query.toString())).collect(Collectors.toList()), + queryFieldNames.stream().map(Value::new).collect(Collectors.toList()), + original, + // If one of the queries is AggregationQuery -> the whole rule can be considered as Agg + queries.stream().filter(query -> query instanceof AggregationQueries).map(it -> new Value(it.toString())).collect(Collectors.toList())); } public Rule(StreamInput sin) throws IOException { @@ -156,7 +174,10 @@ public Rule(StreamInput sin) throws IOException { sin.readString(), sin.readInstant(), sin.readList(Value::readFrom), - sin.readString()); + sin.readList(Value::readFrom), + sin.readString(), + sin.readList(Value::readFrom) + ); } @Override @@ -180,7 +201,10 @@ public void writeTo(StreamOutput out) throws IOException { out.writeInstant(date); out.writeCollection(queries); + out.writeCollection(queryFieldNames); + out.writeString(rule); + out.writeCollection(aggregationQueries); } @Override @@ -220,6 +244,13 @@ private XContentBuilder createXContentBuilder(XContentBuilder builder, ToXConten Value[] queryArray = new Value[]{}; queryArray = queries.toArray(queryArray); builder.field(QUERIES, queryArray); + Value[] queryFieldNamesArray = new Value[]{}; + queryFieldNamesArray = queryFieldNames.toArray(queryFieldNamesArray); + builder.field(QUERY_FIELD_NAMES, queryFieldNamesArray); + + Value[] aggregationsArray = new Value[]{}; + aggregationsArray = aggregationQueries.toArray(aggregationsArray); + builder.field(AGGREGATION_QUERIES, aggregationsArray); builder.field(RULE, rule); if (params.paramAsBoolean("with_type", false)) { @@ -264,7 +295,9 @@ public static Rule parse(XContentParser xcp, String id, Long version) throws IOE Instant date = null; List queries = new ArrayList<>(); + List queryFields = new ArrayList<>(); String original = null; + List aggregationQueries = new ArrayList<>(); XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp); while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { @@ -320,9 +353,20 @@ public static Rule parse(XContentParser xcp, String id, Long version) throws IOE queries.add(Value.parse(xcp)); } break; + case QUERY_FIELD_NAMES: + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp); + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + queryFields.add(Value.parse(xcp)); + } + break; case RULE: original = xcp.text(); break; + case AGGREGATION_QUERIES: + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp); + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + aggregationQueries.add(Value.parse(xcp)); + } default: xcp.skipChildren(); } @@ -343,7 +387,9 @@ public static Rule parse(XContentParser xcp, String id, Long version) throws IOE status, date, queries, - Objects.requireNonNull(original, "Rule String is null") + queryFields, + Objects.requireNonNull(original, "Rule String is null"), + aggregationQueries ); } @@ -418,4 +464,25 @@ public String getRule() { public List getQueries() { return queries; } + + public List getQueryFieldNames() { + return queryFieldNames; + } + + public List getAggregationQueries() { return aggregationQueries; } + + public boolean isAggregationRule() { + return aggregationQueries != null && !aggregationQueries.isEmpty(); + } + + public List getAggregationItemsFromRule () throws SigmaError { + SigmaRule sigmaRule = SigmaRule.fromYaml(rule, true); + List aggregationItems = new ArrayList<>(); + for (SigmaCondition condition: sigmaRule.getDetection().getParsedCondition()) { + Pair parsedItems = condition.parsed(); + AggregationItem aggItem = parsedItems.getRight(); + aggregationItems.add(aggItem); + } + return aggregationItems; + } } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/model/RuleCategory.java b/src/main/java/org/opensearch/securityanalytics/model/RuleCategory.java new file mode 100644 index 000000000..d189465b7 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/model/RuleCategory.java @@ -0,0 +1,94 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.model; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import org.opensearch.OpenSearchParseException; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.io.stream.Writeable; +import org.opensearch.common.settings.SettingsException; +import org.opensearch.common.xcontent.XContentHelper; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; + +public class RuleCategory implements Writeable, ToXContentObject { + + public static final String KEY = "key"; + public static final String DISPLAY_NAME = "display_name"; + + private String name; + private String displayName; + + public RuleCategory(StreamInput sin) throws IOException { + this(sin.readString(), sin.readString()); + } + + public RuleCategory(String name, String displayName) { + this.name = name; + this.displayName = displayName; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + out.writeString(displayName); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject() + .field(KEY, name) + .field(DISPLAY_NAME, displayName) + .endObject(); + } + + public String getName() { + return name; + } + + + private static final String RULE_CATEGORIES_CONFIG_FILE = "rules/rule_categories.json"; + + // Rule category is the same as detector type + public static final List ALL_RULE_CATEGORIES; + + static { + List ruleCategories = new ArrayList<>(); + String ruleCategoriesJson; + try ( + InputStream is = RuleCategory.class.getClassLoader().getResourceAsStream(RULE_CATEGORIES_CONFIG_FILE) + ) { + ruleCategoriesJson = new String(Objects.requireNonNull(is).readAllBytes(), StandardCharsets.UTF_8); + + if (ruleCategoriesJson != null) { + Map configMap = + XContentHelper.convertToMap(JsonXContent.jsonXContent, ruleCategoriesJson, false); + List> categories = (List>) configMap.get("rule_categories"); + for (Map c : categories) { + ruleCategories.add(new RuleCategory( + (String) c.get(KEY), + (String) c.get(DISPLAY_NAME) + )); + } + } + } catch (OpenSearchParseException e) { + throw e; + } catch (Exception e) { + throw new SettingsException("Failed to load settings from [" + RULE_CATEGORIES_CONFIG_FILE + "]", e); + } + ALL_RULE_CATEGORIES = Collections.unmodifiableList(ruleCategories); + } +} + diff --git a/src/main/java/org/opensearch/securityanalytics/model/Value.java b/src/main/java/org/opensearch/securityanalytics/model/Value.java index d55e702b6..89298ae71 100644 --- a/src/main/java/org/opensearch/securityanalytics/model/Value.java +++ b/src/main/java/org/opensearch/securityanalytics/model/Value.java @@ -9,10 +9,10 @@ import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; import java.io.IOException; diff --git a/src/main/java/org/opensearch/securityanalytics/resthandler/RestAcknowledgeAlertsAction.java b/src/main/java/org/opensearch/securityanalytics/resthandler/RestAcknowledgeAlertsAction.java index df20ac15b..f02479c48 100644 --- a/src/main/java/org/opensearch/securityanalytics/resthandler/RestAcknowledgeAlertsAction.java +++ b/src/main/java/org/opensearch/securityanalytics/resthandler/RestAcknowledgeAlertsAction.java @@ -5,7 +5,7 @@ package org.opensearch.securityanalytics.resthandler; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.xcontent.XContentParser; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.action.RestToXContentListener; diff --git a/src/main/java/org/opensearch/securityanalytics/resthandler/RestCreateIndexMappingsAction.java b/src/main/java/org/opensearch/securityanalytics/resthandler/RestCreateIndexMappingsAction.java index f5f500dce..b0871c835 100644 --- a/src/main/java/org/opensearch/securityanalytics/resthandler/RestCreateIndexMappingsAction.java +++ b/src/main/java/org/opensearch/securityanalytics/resthandler/RestCreateIndexMappingsAction.java @@ -5,8 +5,8 @@ package org.opensearch.securityanalytics.resthandler; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.action.RestToXContentListener; diff --git a/src/main/java/org/opensearch/securityanalytics/resthandler/RestDeleteCorrelationRuleAction.java b/src/main/java/org/opensearch/securityanalytics/resthandler/RestDeleteCorrelationRuleAction.java new file mode 100644 index 000000000..547e1661b --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/resthandler/RestDeleteCorrelationRuleAction.java @@ -0,0 +1,50 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.resthandler; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.action.support.WriteRequest; +import org.opensearch.client.node.NodeClient; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.action.RestToXContentListener; +import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; +import org.opensearch.securityanalytics.action.DeleteCorrelationRuleAction; +import org.opensearch.securityanalytics.action.DeleteCorrelationRuleRequest; +import org.opensearch.securityanalytics.action.DeleteRuleAction; + +import static org.opensearch.securityanalytics.util.RestHandlerUtils.REFRESH; + +public class RestDeleteCorrelationRuleAction extends BaseRestHandler { + + private static final Logger log = LogManager.getLogger(RestDeleteCorrelationRuleAction.class); + + @Override + public String getName() { + return "delete_correlation_rule_action"; + } + + @Override + public List routes() { + return List.of( + new Route(RestRequest.Method.DELETE, String.format(Locale.getDefault(), "%s/{correlation_rule_id}", SecurityAnalyticsPlugin.CORRELATION_RULES_BASE_URI)) + ); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + log.debug(String.format(Locale.getDefault(), "%s %s/{correlation_rule_id}", request.method(), SecurityAnalyticsPlugin.CORRELATION_RULES_BASE_URI)); + + String ruleID = request.param("correlation_rule_id"); + + WriteRequest.RefreshPolicy refreshPolicy = WriteRequest.RefreshPolicy.parse(request.param(REFRESH, WriteRequest.RefreshPolicy.IMMEDIATE.getValue())); + DeleteCorrelationRuleRequest deleteRequest = new DeleteCorrelationRuleRequest(ruleID, refreshPolicy); + return channel -> client.execute(DeleteCorrelationRuleAction.INSTANCE, deleteRequest, new RestToXContentListener<>(channel)); + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/resthandler/RestGetAllRuleCategoriesAction.java b/src/main/java/org/opensearch/securityanalytics/resthandler/RestGetAllRuleCategoriesAction.java new file mode 100644 index 000000000..682337371 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/resthandler/RestGetAllRuleCategoriesAction.java @@ -0,0 +1,41 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.resthandler; + +import java.io.IOException; +import java.util.List; +import org.opensearch.client.node.NodeClient; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.action.RestToXContentListener; +import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; +import org.opensearch.securityanalytics.action.GetAllRuleCategoriesAction; +import org.opensearch.securityanalytics.action.GetAllRuleCategoriesRequest; + + +import static org.opensearch.rest.RestRequest.Method.GET; + +public class RestGetAllRuleCategoriesAction extends BaseRestHandler { + + @Override + public String getName() { + return "get_all_rule_categories_action"; + } + + @Override + public List routes() { + return List.of(new Route(GET, SecurityAnalyticsPlugin.RULE_BASE_URI + "/categories")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + + return channel -> client.execute( + GetAllRuleCategoriesAction.INSTANCE, + new GetAllRuleCategoriesRequest(), + new RestToXContentListener<>(channel) + ); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/resthandler/RestGetDetectorAction.java b/src/main/java/org/opensearch/securityanalytics/resthandler/RestGetDetectorAction.java index 317843d64..b23cb93fa 100644 --- a/src/main/java/org/opensearch/securityanalytics/resthandler/RestGetDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/resthandler/RestGetDetectorAction.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.Logger; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.xcontent.XContentParser; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.RestResponse; diff --git a/src/main/java/org/opensearch/securityanalytics/resthandler/RestGetIndexMappingsAction.java b/src/main/java/org/opensearch/securityanalytics/resthandler/RestGetIndexMappingsAction.java index e9d73402b..5f6e6c681 100644 --- a/src/main/java/org/opensearch/securityanalytics/resthandler/RestGetIndexMappingsAction.java +++ b/src/main/java/org/opensearch/securityanalytics/resthandler/RestGetIndexMappingsAction.java @@ -5,7 +5,7 @@ package org.opensearch.securityanalytics.resthandler; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.xcontent.XContentParser; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.action.RestToXContentListener; diff --git a/src/main/java/org/opensearch/securityanalytics/resthandler/RestGetMappingsViewAction.java b/src/main/java/org/opensearch/securityanalytics/resthandler/RestGetMappingsViewAction.java index 38c30f48c..486ca0fc1 100644 --- a/src/main/java/org/opensearch/securityanalytics/resthandler/RestGetMappingsViewAction.java +++ b/src/main/java/org/opensearch/securityanalytics/resthandler/RestGetMappingsViewAction.java @@ -7,7 +7,7 @@ import java.io.IOException; import java.util.List; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.xcontent.XContentParser; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.action.RestToXContentListener; diff --git a/src/main/java/org/opensearch/securityanalytics/resthandler/RestIndexCorrelationRuleAction.java b/src/main/java/org/opensearch/securityanalytics/resthandler/RestIndexCorrelationRuleAction.java new file mode 100644 index 000000000..83c355f33 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/resthandler/RestIndexCorrelationRuleAction.java @@ -0,0 +1,97 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.securityanalytics.resthandler; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.client.node.NodeClient; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.BytesRestResponse; +import org.opensearch.rest.RestChannel; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.RestResponse; +import org.opensearch.rest.RestStatus; +import org.opensearch.rest.action.RestResponseListener; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; +import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; +import org.opensearch.securityanalytics.action.IndexCorrelationRuleAction; +import org.opensearch.securityanalytics.action.IndexCorrelationRuleRequest; +import org.opensearch.securityanalytics.action.IndexCorrelationRuleResponse; +import org.opensearch.securityanalytics.model.CorrelationRule; + +public class RestIndexCorrelationRuleAction extends BaseRestHandler { + + private static final Logger log = LogManager.getLogger(RestIndexCorrelationRuleAction.class); + + @Override + public String getName() { + return "index_correlation_rule_action"; + } + + @Override + public List routes() { + return List.of( + new Route(RestRequest.Method.POST, SecurityAnalyticsPlugin.CORRELATION_RULES_BASE_URI) + ); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + log.debug(String.format(Locale.ROOT, "%s %s", request.method(), SecurityAnalyticsPlugin.CORRELATION_RULES_BASE_URI)); + + String id = request.param("rule_id", CorrelationRule.NO_ID); + + XContentParser xcp = request.contentParser(); + + CorrelationRule correlationRule = CorrelationRule.parse(xcp, id, null); + IndexCorrelationRuleRequest indexCorrelationRuleRequest = new IndexCorrelationRuleRequest(id, correlationRule, request.method()); + return channel -> client.execute( + IndexCorrelationRuleAction.INSTANCE, + indexCorrelationRuleRequest, + indexCorrelationRuleResponse(channel, request.method()) + ); + } + + private RestResponseListener indexCorrelationRuleResponse( + RestChannel channel, + RestRequest.Method restMethod + ) { + return new RestResponseListener<>(channel) { + @Override + public RestResponse buildResponse(IndexCorrelationRuleResponse response) throws Exception { + RestStatus returnStatus = RestStatus.CREATED; + if (restMethod == RestRequest.Method.PUT) { + returnStatus = RestStatus.OK; + } + + BytesRestResponse restResponse = new BytesRestResponse( + returnStatus, + response.toXContent(channel.newBuilder(), ToXContent.EMPTY_PARAMS) + ); + + if (restMethod == RestRequest.Method.POST) { + String location = String.format( + Locale.ROOT, + "%s/%s", + SecurityAnalyticsPlugin.CORRELATION_RULES_BASE_URI, + response.getId() + ); + restResponse.addHeader("Location", location); + } + + return restResponse; + } + }; + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/resthandler/RestIndexDetectorAction.java b/src/main/java/org/opensearch/securityanalytics/resthandler/RestIndexDetectorAction.java index 26a87448c..62a59980e 100644 --- a/src/main/java/org/opensearch/securityanalytics/resthandler/RestIndexDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/resthandler/RestIndexDetectorAction.java @@ -8,9 +8,9 @@ import org.apache.logging.log4j.Logger; import org.opensearch.action.support.WriteRequest; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; diff --git a/src/main/java/org/opensearch/securityanalytics/resthandler/RestIndexRuleAction.java b/src/main/java/org/opensearch/securityanalytics/resthandler/RestIndexRuleAction.java index a6d064538..65484ec19 100644 --- a/src/main/java/org/opensearch/securityanalytics/resthandler/RestIndexRuleAction.java +++ b/src/main/java/org/opensearch/securityanalytics/resthandler/RestIndexRuleAction.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.Logger; import org.opensearch.action.support.WriteRequest; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.xcontent.ToXContent; +import org.opensearch.core.xcontent.ToXContent; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; diff --git a/src/main/java/org/opensearch/securityanalytics/resthandler/RestListCorrelationAction.java b/src/main/java/org/opensearch/securityanalytics/resthandler/RestListCorrelationAction.java new file mode 100644 index 000000000..ce34c7904 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/resthandler/RestListCorrelationAction.java @@ -0,0 +1,73 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.resthandler; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.client.node.NodeClient; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.BytesRestResponse; +import org.opensearch.rest.RestChannel; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.RestResponse; +import org.opensearch.rest.action.RestResponseListener; +import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; +import org.opensearch.securityanalytics.action.ListCorrelationsAction; +import org.opensearch.securityanalytics.action.ListCorrelationsRequest; +import org.opensearch.securityanalytics.action.ListCorrelationsResponse; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; + +import static org.opensearch.rest.RestRequest.Method.GET; +import static org.opensearch.rest.RestStatus.OK; + +public class RestListCorrelationAction extends BaseRestHandler { + + private static final Logger log = LogManager.getLogger(RestListCorrelationAction.class); + + @Override + public String getName() { + return "list_correlation_action"; + } + + @Override + public List routes() { + return List.of( + new Route(GET, SecurityAnalyticsPlugin.LIST_CORRELATIONS_URI) + ); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + log.debug(String.format(Locale.ROOT, "%s %s", request.method(), SecurityAnalyticsPlugin.LIST_CORRELATIONS_URI)); + + Long defaultTimestamp = System.currentTimeMillis(); + Long startTimestamp = request.paramAsLong("start_timestamp", defaultTimestamp - 300000L); + Long endTimestamp = request.paramAsLong("end_timestamp", defaultTimestamp); + + ListCorrelationsRequest correlationsRequest = new ListCorrelationsRequest(startTimestamp, endTimestamp); + return channel -> { + client.execute(ListCorrelationsAction.INSTANCE, correlationsRequest, new RestListCorrelationAction.RestListCorrelationResponseListener(channel, request)); + }; + } + + static class RestListCorrelationResponseListener extends RestResponseListener { + private final RestRequest request; + + RestListCorrelationResponseListener(RestChannel channel, RestRequest request) { + super(channel); + this.request = request; + } + + @Override + public RestResponse buildResponse(final ListCorrelationsResponse response) throws Exception { + return new BytesRestResponse(OK, response.toXContent(channel.newBuilder(), ToXContent.EMPTY_PARAMS)); + } + + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/resthandler/RestSearchCorrelationAction.java b/src/main/java/org/opensearch/securityanalytics/resthandler/RestSearchCorrelationAction.java new file mode 100644 index 000000000..c90b74753 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/resthandler/RestSearchCorrelationAction.java @@ -0,0 +1,84 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.resthandler; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.client.node.NodeClient; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.BytesRestResponse; +import org.opensearch.rest.RestChannel; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.RestResponse; +import org.opensearch.rest.action.RestResponseListener; +import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; +import org.opensearch.securityanalytics.action.CorrelatedFindingAction; +import org.opensearch.securityanalytics.action.CorrelatedFindingRequest; +import org.opensearch.securityanalytics.action.CorrelatedFindingResponse; +import org.opensearch.securityanalytics.model.Detector; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; + +import static org.opensearch.rest.RestRequest.Method.GET; +import static org.opensearch.rest.RestStatus.OK; + +public class RestSearchCorrelationAction extends BaseRestHandler { + + private static final Logger log = LogManager.getLogger(RestSearchCorrelationAction.class); + + @Override + public String getName() { + return "search_correlation_action"; + } + + @Override + public List routes() { + return List.of( + new Route(GET, SecurityAnalyticsPlugin.FINDINGS_CORRELATE_URI) + ); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + log.debug(String.format(Locale.ROOT, "%s %s", request.method(), SecurityAnalyticsPlugin.FINDINGS_CORRELATE_URI)); + + String findingId = request.param("finding"); + if (findingId == null) { + throw new IllegalArgumentException("Missing finding"); + } + + String detectorType = request.param("detector_type"); + if (detectorType == null) { + throw new IllegalArgumentException("Missing detectorType"); + } + + long timeWindow = request.paramAsLong("time_window", 300000L); + int noOfNearbyFindings = request.paramAsInt("nearby_findings", 10); + + CorrelatedFindingRequest correlatedFindingRequest = new CorrelatedFindingRequest(findingId, Detector.DetectorType.valueOf(detectorType.toUpperCase(Locale.ROOT)), timeWindow, noOfNearbyFindings); + + return channel -> { + client.execute(CorrelatedFindingAction.INSTANCE, correlatedFindingRequest, new RestCorrelatedFindingResponseListener(channel, request)); + }; + } + + static class RestCorrelatedFindingResponseListener extends RestResponseListener { + private final RestRequest request; + + RestCorrelatedFindingResponseListener(RestChannel channel, RestRequest request) { + super(channel); + this.request = request; + } + + @Override + public RestResponse buildResponse(final CorrelatedFindingResponse response) throws Exception { + return new BytesRestResponse(OK, response.toXContent(channel.newBuilder(), ToXContent.EMPTY_PARAMS)); + } + + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/resthandler/RestSearchCorrelationRuleAction.java b/src/main/java/org/opensearch/securityanalytics/resthandler/RestSearchCorrelationRuleAction.java new file mode 100644 index 000000000..5b5f0fffa --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/resthandler/RestSearchCorrelationRuleAction.java @@ -0,0 +1,88 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.resthandler; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.client.node.NodeClient; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.action.RestToXContentListener; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; +import org.opensearch.securityanalytics.action.SearchCorrelationRuleAction; +import org.opensearch.securityanalytics.action.SearchCorrelationRuleRequest; +import org.opensearch.securityanalytics.model.CorrelationRule; + +public class RestSearchCorrelationRuleAction extends BaseRestHandler { + + private static final Logger log = LogManager.getLogger(RestSearchCorrelationRuleAction.class); + + @Override + public String getName() { + return "search_correlation_rule_action"; + } + + @Override + public List routes() { + return List.of( + new Route(RestRequest.Method.POST, SecurityAnalyticsPlugin.CORRELATION_RULES_BASE_URI + "/_search") + ); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + log.debug(String.format(Locale.getDefault(), "%s %s/_search", request.method(), SecurityAnalyticsPlugin.CORRELATION_RULES_BASE_URI)); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.parseXContent(request.contentOrSourceParamParser()); + searchSourceBuilder.fetchSource(null); + + QueryBuilder queryBuilder = QueryBuilders.boolQuery().must(searchSourceBuilder.query()); + + searchSourceBuilder.query(queryBuilder) + .seqNoAndPrimaryTerm(true) + .version(true); + SearchRequest searchRequest = new SearchRequest() + .source(searchSourceBuilder) + .indices(CorrelationRule.CORRELATION_RULE_INDEX); + + SearchCorrelationRuleRequest searchCorrelationRuleRequest = new SearchCorrelationRuleRequest(searchRequest); + return channel -> client.execute(SearchCorrelationRuleAction.INSTANCE, searchCorrelationRuleRequest, new RestToXContentListener<>(channel)); + } + +// private RestResponseListener searchRuleResponse(RestChannel channel) { +// return new RestResponseListener<>(channel) { +// @Override +// public RestResponse buildResponse(SearchResponse response) throws Exception { +// if (response.isTimedOut()) { +// return new BytesRestResponse(RestStatus.REQUEST_TIMEOUT, response.toString()); +// } +// +// try { +// for (SearchHit hit: response.getHits()) { +// XContentParser xcp = XContentType.JSON.xContent().createParser( +// channel.request().getXContentRegistry(), +// LoggingDeprecationHandler.INSTANCE, hit.getSourceAsString() +// ); +// +// Rule rule = Rule.docParse(xcp, hit.getId(), hit.getVersion()); +// XContentBuilder xcb = rule.toXContent(XContentFactory.jsonBuilder(), EMPTY_PARAMS); +// hit.sourceRef(BytesReference.bytes(xcb)); +// } +// } catch (Exception ex) { +// log.info("The rule parsing failed. Will return response as is."); +// } +// return new BytesRestResponse(RestStatus.OK, response.toXContent(channel.newBuilder(), EMPTY_PARAMS)); +// } +// }; +// } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/resthandler/RestSearchDetectorAction.java b/src/main/java/org/opensearch/securityanalytics/resthandler/RestSearchDetectorAction.java index c5a49f34c..34f6af368 100644 --- a/src/main/java/org/opensearch/securityanalytics/resthandler/RestSearchDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/resthandler/RestSearchDetectorAction.java @@ -14,11 +14,11 @@ import org.opensearch.client.node.NodeClient; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; diff --git a/src/main/java/org/opensearch/securityanalytics/resthandler/RestSearchRuleAction.java b/src/main/java/org/opensearch/securityanalytics/resthandler/RestSearchRuleAction.java index 67df61303..03003ca3f 100644 --- a/src/main/java/org/opensearch/securityanalytics/resthandler/RestSearchRuleAction.java +++ b/src/main/java/org/opensearch/securityanalytics/resthandler/RestSearchRuleAction.java @@ -11,10 +11,10 @@ import org.opensearch.client.node.NodeClient; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.rest.BaseRestHandler; @@ -35,7 +35,7 @@ import java.util.List; import java.util.Locale; -import static org.opensearch.common.xcontent.ToXContent.EMPTY_PARAMS; +import static org.opensearch.core.xcontent.ToXContent.EMPTY_PARAMS; public class RestSearchRuleAction extends BaseRestHandler { @@ -60,7 +60,6 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli Boolean isPrepackaged = request.paramAsBoolean("pre_packaged", true); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.parseXContent(request.contentOrSourceParamParser()); - searchSourceBuilder.fetchSource(null); QueryBuilder queryBuilder = QueryBuilders.boolQuery().must(searchSourceBuilder.query()); diff --git a/src/main/java/org/opensearch/securityanalytics/resthandler/RestUpdateIndexMappingsAction.java b/src/main/java/org/opensearch/securityanalytics/resthandler/RestUpdateIndexMappingsAction.java index 61ff75036..2f8371aa1 100644 --- a/src/main/java/org/opensearch/securityanalytics/resthandler/RestUpdateIndexMappingsAction.java +++ b/src/main/java/org/opensearch/securityanalytics/resthandler/RestUpdateIndexMappingsAction.java @@ -5,8 +5,8 @@ package org.opensearch.securityanalytics.resthandler; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.action.RestToXContentListener; diff --git a/src/main/java/org/opensearch/securityanalytics/resthandler/RestValidateRulesAction.java b/src/main/java/org/opensearch/securityanalytics/resthandler/RestValidateRulesAction.java new file mode 100644 index 000000000..7abf5421f --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/resthandler/RestValidateRulesAction.java @@ -0,0 +1,62 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.resthandler; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.action.support.WriteRequest; +import org.opensearch.client.node.NodeClient; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.BytesRestResponse; +import org.opensearch.rest.RestChannel; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.RestResponse; +import org.opensearch.rest.RestStatus; +import org.opensearch.rest.action.RestResponseListener; +import org.opensearch.rest.action.RestToXContentListener; +import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; +import org.opensearch.securityanalytics.action.CreateIndexMappingsRequest; +import org.opensearch.securityanalytics.action.IndexRuleAction; +import org.opensearch.securityanalytics.action.IndexRuleRequest; +import org.opensearch.securityanalytics.action.IndexRuleResponse; +import org.opensearch.securityanalytics.action.ValidateRulesAction; +import org.opensearch.securityanalytics.action.ValidateRulesRequest; +import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.util.RestHandlerUtils; + +public class RestValidateRulesAction extends BaseRestHandler { + + private static final Logger log = LogManager.getLogger(RestValidateRulesAction.class); + + @Override + public String getName() { + return "validate_rules_action"; + } + + @Override + public List routes() { + return List.of( + new Route(RestRequest.Method.POST, SecurityAnalyticsPlugin.RULE_BASE_URI + "/validate") + ); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + + + ValidateRulesRequest req; + try (XContentParser xcp = request.contentParser()) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp); + req = ValidateRulesRequest.parse(xcp); + } + return channel -> client.execute(ValidateRulesAction.INSTANCE, req, new RestToXContentListener<>(channel)); + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/rules/backend/AggregationBuilders.java b/src/main/java/org/opensearch/securityanalytics/rules/backend/AggregationBuilders.java new file mode 100644 index 000000000..3927186fb --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/rules/backend/AggregationBuilders.java @@ -0,0 +1,56 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.rules.backend; + +import java.util.Locale; +import org.apache.commons.lang3.NotImplementedException; +import org.opensearch.search.aggregations.AggregationBuilder; +import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder; +import org.opensearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.opensearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.opensearch.search.aggregations.metrics.MedianAbsoluteDeviationAggregationBuilder; +import org.opensearch.search.aggregations.metrics.MinAggregationBuilder; +import org.opensearch.search.aggregations.metrics.SumAggregationBuilder; +import org.opensearch.search.aggregations.metrics.ValueCountAggregationBuilder; + +public final class AggregationBuilders { + + /** + * Finds the builder aggregation based on the forwarded function + * + * @param aggregationFunction Aggregation function + * @param name Name of the aggregation + * @return Aggregation builder + */ + public static AggregationBuilder getAggregationBuilderByFunction(String aggregationFunction, String name) { + AggregationBuilder aggregationBuilder; + switch (aggregationFunction.toLowerCase(Locale.ROOT)) { + case AvgAggregationBuilder.NAME: + aggregationBuilder = new AvgAggregationBuilder(name).field(name); + break; + case MaxAggregationBuilder.NAME: + aggregationBuilder = new MaxAggregationBuilder(name).field(name); + break; + case MedianAbsoluteDeviationAggregationBuilder.NAME: + aggregationBuilder = new MedianAbsoluteDeviationAggregationBuilder(name).field(name); + break; + case MinAggregationBuilder.NAME: + aggregationBuilder = new MinAggregationBuilder(name).field(name); + break; + case SumAggregationBuilder.NAME: + aggregationBuilder = new SumAggregationBuilder(name).field(name); + break; + case TermsAggregationBuilder.NAME: + aggregationBuilder = new TermsAggregationBuilder(name).field(name); + break; + case "count": + aggregationBuilder = new ValueCountAggregationBuilder(name).field(name); + break; + default: + throw new NotImplementedException(String.format(Locale.getDefault(), "Aggregation %s not supported by the backend", aggregationFunction)); + } + return aggregationBuilder; + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/rules/backend/OSQueryBackend.java b/src/main/java/org/opensearch/securityanalytics/rules/backend/OSQueryBackend.java index a84c82ae9..07455945c 100644 --- a/src/main/java/org/opensearch/securityanalytics/rules/backend/OSQueryBackend.java +++ b/src/main/java/org/opensearch/securityanalytics/rules/backend/OSQueryBackend.java @@ -4,6 +4,22 @@ */ package org.opensearch.securityanalytics.rules.backend; +import org.opensearch.OpenSearchParseException; +import org.opensearch.common.UUIDs; +import org.opensearch.common.bytes.BytesReference; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.io.stream.Writeable; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtAggregationBuilder; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.script.Script; +import org.opensearch.search.aggregations.AggregationBuilder; +import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.opensearch.securityanalytics.rules.aggregation.AggregationItem; import org.opensearch.securityanalytics.rules.condition.ConditionAND; import org.opensearch.securityanalytics.rules.condition.ConditionFieldEqualsValueExpression; @@ -25,7 +41,6 @@ import org.apache.commons.lang3.NotImplementedException; import java.io.IOException; -import java.io.Serializable; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -86,6 +101,8 @@ public class OSQueryBackend extends QueryBackend { private String bucketTriggerQuery; + private String bucketTriggerScript; + private static final String groupExpression = "(%s)"; private static final Map compareOperators = Map.of( SigmaCompareExpression.CompareOperators.GT, "gt", @@ -114,16 +131,17 @@ public OSQueryBackend(String ruleCategory, boolean collectErrors, boolean enable this.reEscapeChar = "\\"; this.reExpression = "%s: /%s/"; this.cidrExpression = "%s: \"%s\""; - this.fieldNullExpression = "%s: null"; - this.unboundValueStrExpression = "%s: \"%s\""; - this.unboundValueNumExpression = "%s: %s"; - this.unboundWildcardExpression = "%s: %s"; - this.unboundReExpression = "%s: /%s/"; + this.fieldNullExpression = "%s: (NOT [* TO *])"; + this.unboundValueStrExpression = "\"%s\""; + this.unboundValueNumExpression = "\"%s\""; + this.unboundWildcardExpression = "%s"; + this.unboundReExpression = "/%s/"; this.compareOpExpression = "\"%s\" \"%s\" %s"; this.valExpCount = 0; - this.aggQuery = "\"aggs\":{\"%s\":{\"terms\":{\"field\":\"%s\"},\"aggs\":{\"%s\":{\"%s\":{\"field\":\"%s\"}}}}}"; - this.aggCountQuery = "\"aggs\":{\"%s\":{\"terms\":{\"field\":\"%s\"}}}"; + this.aggQuery = "{\"%s\":{\"terms\":{\"field\":\"%s\"},\"aggs\":{\"%s\":{\"%s\":{\"field\":\"%s\"}}}}}"; + this.aggCountQuery = "{\"%s\":{\"terms\":{\"field\":\"%s\"}}}"; this.bucketTriggerQuery = "{\"buckets_path\":{\"%s\":\"%s\"},\"parent_bucket_path\":\"%s\",\"script\":{\"source\":\"params.%s %s %s\",\"lang\":\"painless\"}}"; + this.bucketTriggerScript = "params.%s %s %s"; } @Override @@ -313,29 +331,22 @@ public Object convertConditionFieldEqValQueryExpr(ConditionFieldEqualsValueExpre @Override public Object convertConditionValStr(ConditionValueExpression condition) throws SigmaValueError { - SigmaString value = (SigmaString) condition.getValue(); - String field = getFinalValueField(); ruleQueryFields.put(field, Map.of("type", "text", "analyzer", "rule_analyzer")); + SigmaString value = (SigmaString) condition.getValue(); boolean containsWildcard = value.containsWildcard(); - return String.format(Locale.getDefault(), (containsWildcard? this.unboundWildcardExpression: this.unboundValueStrExpression), field, this.convertValueStr((SigmaString) condition.getValue())); + return String.format(Locale.getDefault(), (containsWildcard? this.unboundWildcardExpression: this.unboundValueStrExpression), + this.convertValueStr((SigmaString) condition.getValue())); } @Override public Object convertConditionValNum(ConditionValueExpression condition) { - String field = getFinalValueField(); - - SigmaNumber number = (SigmaNumber) condition.getValue(); - ruleQueryFields.put(field, number.getNumOpt().isLeft()? Collections.singletonMap("type", "integer"): Collections.singletonMap("type", "float")); - - return String.format(Locale.getDefault(), this.unboundValueNumExpression, field, condition.getValue().toString()); + return String.format(Locale.getDefault(), this.unboundValueNumExpression, condition.getValue().toString()); } @Override public Object convertConditionValRe(ConditionValueExpression condition) { - String field = getFinalValueField(); - ruleQueryFields.put(field, Map.of("type", "text", "analyzer", "rule_analyzer")); - return String.format(Locale.getDefault(), this.unboundReExpression, field, convertValueRe((SigmaRegularExpression) condition.getValue())); + return String.format(Locale.getDefault(), this.unboundReExpression, convertValueRe((SigmaRegularExpression) condition.getValue())); } // TODO: below methods will be supported when Sigma Expand Modifier is supported. @@ -346,24 +357,48 @@ public Object convertConditionValQueryExpr(ConditionValueExpression condition) { }*/ @Override - public Object convertAggregation(AggregationItem aggregation) { + public AggregationQueries convertAggregation(AggregationItem aggregation) { String fmtAggQuery; String fmtBucketTriggerQuery; + TermsAggregationBuilder aggBuilder = new TermsAggregationBuilder("result_agg"); + BucketSelectorExtAggregationBuilder condition; + String bucketTriggerSelectorId = UUIDs.base64UUID(); + if (aggregation.getAggFunction().equals("count")) { + String fieldName; if (aggregation.getAggField().equals("*") && aggregation.getGroupByField() == null) { + fieldName = "_index"; fmtAggQuery = String.format(Locale.getDefault(), aggCountQuery, "result_agg", "_index"); } else { + fieldName = aggregation.getGroupByField(); fmtAggQuery = String.format(Locale.getDefault(), aggCountQuery, "result_agg", aggregation.getGroupByField()); } + aggBuilder.field(fieldName); fmtBucketTriggerQuery = String.format(Locale.getDefault(), bucketTriggerQuery, "_cnt", "_cnt", "result_agg", "_cnt", aggregation.getCompOperator(), aggregation.getThreshold()); + + Script script = new Script(String.format(Locale.getDefault(), bucketTriggerScript, "_cnt", aggregation.getCompOperator(), aggregation.getThreshold())); + condition = new BucketSelectorExtAggregationBuilder(bucketTriggerSelectorId, Collections.singletonMap("_cnt", "_cnt"), script, "result_agg", null); } else { fmtAggQuery = String.format(Locale.getDefault(), aggQuery, "result_agg", aggregation.getGroupByField(), aggregation.getAggField(), aggregation.getAggFunction(), aggregation.getAggField()); fmtBucketTriggerQuery = String.format(Locale.getDefault(), bucketTriggerQuery, aggregation.getAggField(), aggregation.getAggField(), "result_agg", aggregation.getAggField(), aggregation.getCompOperator(), aggregation.getThreshold()); + + // Add subaggregation + AggregationBuilder subAgg = AggregationBuilders.getAggregationBuilderByFunction(aggregation.getAggFunction(), aggregation.getAggField()); + if (subAgg != null) { + aggBuilder.field(aggregation.getGroupByField()).subAggregation(subAgg); + } + + Script script = new Script(String.format(Locale.getDefault(), bucketTriggerScript, aggregation.getAggField(), aggregation.getCompOperator(), aggregation.getThreshold())); + condition = new BucketSelectorExtAggregationBuilder(bucketTriggerSelectorId, Collections.singletonMap(aggregation.getAggField(), aggregation.getAggField()), script, "result_agg", null); } - AggregationQueries aggQueries = new AggregationQueries(); - aggQueries.setAggQuery(fmtAggQuery); - aggQueries.setBucketTriggerQuery(fmtBucketTriggerQuery); - return aggQueries; + + AggregationQueries aggregationQueries = new AggregationQueries(); + aggregationQueries.setAggQuery(fmtAggQuery); + aggregationQueries.setBucketTriggerQuery(fmtBucketTriggerQuery); + aggregationQueries.setAggBuilder(aggBuilder); + aggregationQueries.setCondition(condition); + + return aggregationQueries; } private boolean comparePrecedence(ConditionType outer, ConditionType inner) { @@ -416,26 +451,111 @@ private String getFinalValueField() { return field; } - public static class AggregationQueries implements Serializable { + public static class AggregationQueries implements Writeable, ToXContentObject { + private static final String AGG_QUERY = "aggQuery"; + private static final String BUCKET_TRIGGER_QUERY = "bucketTriggerQuery"; + + public AggregationQueries() { + } + + public AggregationQueries(StreamInput in) throws IOException { + this.aggQuery = in.readString(); + this.bucketTriggerQuery = in.readString(); + } + + public static AggregationQueries docParse(XContentParser xcp) throws IOException{ + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp); + return AggregationQueries.parse(xcp); + } + + public static AggregationQueries parse(XContentParser xcp) throws IOException { + String aggQuery = null; + String bucketTriggerQuery = null; + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp); + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + String fieldName = xcp.currentName(); + xcp.nextToken(); + + switch (fieldName) { + case AGG_QUERY: + aggQuery = xcp.text(); + break; + case BUCKET_TRIGGER_QUERY: + bucketTriggerQuery = xcp.text(); + break; + default: + xcp.skipChildren(); + } + } + AggregationQueries aggregationQueries = new AggregationQueries(); + aggregationQueries.setAggQuery(aggQuery); + aggregationQueries.setBucketTriggerQuery(bucketTriggerQuery); + + return aggregationQueries; + } private String aggQuery; + private AggregationBuilder aggBuilder; + private String bucketTriggerQuery; + private BucketSelectorExtAggregationBuilder condition; + + public String getAggQuery() { + return aggQuery; + } + public void setAggQuery(String aggQuery) { this.aggQuery = aggQuery; } - public String getAggQuery() { - return aggQuery; + public AggregationBuilder getAggBuilder() { + return aggBuilder; } - public void setBucketTriggerQuery(String bucketTriggerQuery) { - this.bucketTriggerQuery = bucketTriggerQuery; + public void setAggBuilder(AggregationBuilder aggBuilder) { + this.aggBuilder = aggBuilder; } public String getBucketTriggerQuery() { return bucketTriggerQuery; } + + public void setBucketTriggerQuery(String bucketTriggerQuery) { + this.bucketTriggerQuery = bucketTriggerQuery; + } + + public BucketSelectorExtAggregationBuilder getCondition() { + return condition; + } + + public void setCondition(BucketSelectorExtAggregationBuilder condition) { + this.condition = condition; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return createXContentBuilder(builder); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(aggQuery); + out.writeString(bucketTriggerQuery); + } + + private XContentBuilder createXContentBuilder(XContentBuilder builder) throws IOException { + return builder.startObject().field(AGG_QUERY, aggQuery).field(BUCKET_TRIGGER_QUERY, bucketTriggerQuery).endObject(); + } + + public String toString() { + try { + return BytesReference.bytes(this.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)).utf8ToString(); + } catch (IOException ex) { + throw new OpenSearchParseException("failed to convert source to a json string", new Object[0]); + } + } } } diff --git a/src/main/java/org/opensearch/securityanalytics/rules/backend/QueryBackend.java b/src/main/java/org/opensearch/securityanalytics/rules/backend/QueryBackend.java index 8a4d00a52..1b868054e 100644 --- a/src/main/java/org/opensearch/securityanalytics/rules/backend/QueryBackend.java +++ b/src/main/java/org/opensearch/securityanalytics/rules/backend/QueryBackend.java @@ -4,7 +4,10 @@ */ package org.opensearch.securityanalytics.rules.backend; +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtAggregationBuilder; +import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.securityanalytics.rules.aggregation.AggregationItem; +import org.opensearch.securityanalytics.rules.backend.OSQueryBackend.AggregationQueries; import org.opensearch.securityanalytics.rules.condition.ConditionAND; import org.opensearch.securityanalytics.rules.condition.ConditionFieldEqualsValueExpression; import org.opensearch.securityanalytics.rules.condition.ConditionItem; @@ -28,7 +31,9 @@ import org.opensearch.securityanalytics.rules.utils.AnyOneOf; import org.opensearch.securityanalytics.rules.utils.Either; import org.apache.commons.lang3.tuple.Pair; +import org.yaml.snakeyaml.LoaderOptions; import org.yaml.snakeyaml.Yaml; +import org.yaml.snakeyaml.constructor.SafeConstructor; import java.io.IOException; import java.io.InputStream; @@ -67,7 +72,7 @@ public QueryBackend(String ruleCategory, boolean convertAndAsIn, boolean enableF assert is != null; String content = new String(is.readAllBytes(), Charset.defaultCharset()); - Yaml yaml = new Yaml(); + Yaml yaml = new Yaml(new SafeConstructor(new LoaderOptions())); Map fieldMappingsObj = yaml.load(content); this.fieldMappings = (Map) fieldMappingsObj.get("fieldmappings"); @@ -176,6 +181,13 @@ public Map getQueryFields() { return queryFields; } + public void resetQueryFields() { + queryFields.clear(); + if (ruleQueryFields != null) { + ruleQueryFields.clear(); + } + } + public abstract Object convertConditionAsInExpression(Either condition); public abstract Object convertConditionAnd(ConditionAND condition); @@ -261,5 +273,5 @@ public Object convertConditionVal(ConditionValueExpression condition) throws Sig /* public abstract Object convertConditionValQueryExpr(ConditionValueExpression condition);*/ - public abstract Object convertAggregation(AggregationItem aggregation); + public abstract AggregationQueries convertAggregation(AggregationItem aggregation) throws SigmaError; } diff --git a/src/main/java/org/opensearch/securityanalytics/rules/objects/SigmaDetectionItem.java b/src/main/java/org/opensearch/securityanalytics/rules/objects/SigmaDetectionItem.java index a334ca758..c74bd9177 100644 --- a/src/main/java/org/opensearch/securityanalytics/rules/objects/SigmaDetectionItem.java +++ b/src/main/java/org/opensearch/securityanalytics/rules/objects/SigmaDetectionItem.java @@ -18,6 +18,7 @@ import org.opensearch.securityanalytics.rules.modifiers.SigmaModifierFacade; import org.opensearch.securityanalytics.rules.modifiers.SigmaValueModifier; import org.opensearch.securityanalytics.rules.types.SigmaNull; +import org.opensearch.securityanalytics.rules.types.SigmaString; import org.opensearch.securityanalytics.rules.types.SigmaType; import org.opensearch.securityanalytics.rules.types.SigmaTypeFacade; import org.opensearch.securityanalytics.rules.utils.AnyOneOf; @@ -111,7 +112,14 @@ public static SigmaDetectionItem fromMapping(String key, Either> List sigmaTypes = new ArrayList<>(); for (T v: values) { - sigmaTypes.add(SigmaTypeFacade.sigmaType(v)); + SigmaType sigmaType = SigmaTypeFacade.sigmaType(v); + // throws an error if sigmaType is an empty string and the modifier is "contains" or "startswith" or "endswith" + boolean invalidModifierWithEmptyString = modifierIds.contains("contains") || modifierIds.contains("startswith") || modifierIds.contains("endswith"); + if (sigmaType.getClass().equals(SigmaString.class) && v.toString().isEmpty() && invalidModifierWithEmptyString) { + throw new SigmaValueError("Cannot create rule with empty string and given modifier(s): " + modifierIds); + } else { + sigmaTypes.add(sigmaType); + } } return new SigmaDetectionItem(field, modifiers, sigmaTypes, null, null, true); diff --git a/src/main/java/org/opensearch/securityanalytics/rules/objects/SigmaRule.java b/src/main/java/org/opensearch/securityanalytics/rules/objects/SigmaRule.java index 7eff821a5..e3de43649 100644 --- a/src/main/java/org/opensearch/securityanalytics/rules/objects/SigmaRule.java +++ b/src/main/java/org/opensearch/securityanalytics/rules/objects/SigmaRule.java @@ -11,7 +11,11 @@ import org.opensearch.securityanalytics.rules.exceptions.SigmaLevelError; import org.opensearch.securityanalytics.rules.exceptions.SigmaLogsourceError; import org.opensearch.securityanalytics.rules.exceptions.SigmaStatusError; +import org.yaml.snakeyaml.DumperOptions; +import org.yaml.snakeyaml.LoaderOptions; import org.yaml.snakeyaml.Yaml; +import org.yaml.snakeyaml.constructor.SafeConstructor; +import org.yaml.snakeyaml.representer.Representer; import java.text.SimpleDateFormat; import java.util.ArrayList; @@ -167,7 +171,10 @@ protected static SigmaRule fromDict(Map rule, boolean collectErr } public static SigmaRule fromYaml(String rule, boolean collectErrors) throws SigmaError { - Yaml yaml = new Yaml(); + LoaderOptions loaderOptions = new LoaderOptions(); + loaderOptions.setNestingDepthLimit(10); + + Yaml yaml = new Yaml(new SafeConstructor(new LoaderOptions()), new Representer(new DumperOptions()), new DumperOptions(), loaderOptions); Map ruleMap = yaml.load(rule); return fromDict(ruleMap, collectErrors); } diff --git a/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java b/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java index 693f6af05..a3836673a 100644 --- a/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java +++ b/src/main/java/org/opensearch/securityanalytics/settings/SecurityAnalyticsSettings.java @@ -4,12 +4,104 @@ */ package org.opensearch.securityanalytics.settings; +import java.util.concurrent.TimeUnit; import org.opensearch.common.settings.Setting; import org.opensearch.common.unit.TimeValue; public class SecurityAnalyticsSettings { + public static final String CORRELATION_INDEX = "index.correlation"; public static Setting INDEX_TIMEOUT = Setting.positiveTimeSetting("plugins.security_analytics.index_timeout", TimeValue.timeValueSeconds(60), Setting.Property.NodeScope, Setting.Property.Dynamic); + + public static final Long DEFAULT_MAX_ACTIONABLE_ALERT_COUNT = 50L; + + public static final Setting ALERT_HISTORY_ENABLED = Setting.boolSetting( + "plugins.security_analytics.alert_history_enabled", + true, + Setting.Property.NodeScope, Setting.Property.Dynamic + ); + + public static final Setting FINDING_HISTORY_ENABLED = Setting.boolSetting( + "plugins.security_analytics.alert_finding_enabled", + true, + Setting.Property.NodeScope, Setting.Property.Dynamic + ); + + public static final Setting ALERT_HISTORY_ROLLOVER_PERIOD = Setting.positiveTimeSetting( + "plugins.security_analytics.alert_history_rollover_period", + TimeValue.timeValueHours(12), + Setting.Property.NodeScope, Setting.Property.Dynamic + ); + + public static final Setting FINDING_HISTORY_ROLLOVER_PERIOD = Setting.positiveTimeSetting( + "plugins.security_analytics.alert_finding_rollover_period", + TimeValue.timeValueHours(12), + Setting.Property.NodeScope, Setting.Property.Dynamic + ); + + public static final Setting ALERT_HISTORY_INDEX_MAX_AGE = Setting.positiveTimeSetting( + "plugins.security_analytics.alert_history_max_age", + new TimeValue(30, TimeUnit.DAYS), + Setting.Property.NodeScope, Setting.Property.Dynamic + ); + + public static final Setting FINDING_HISTORY_INDEX_MAX_AGE = Setting.positiveTimeSetting( + "plugins.security_analytics.finding_history_max_age", + new TimeValue(30, TimeUnit.DAYS), + Setting.Property.NodeScope, Setting.Property.Dynamic + ); + + public static final Setting ALERT_HISTORY_MAX_DOCS = Setting.longSetting( + "plugins.security_analytics.alert_history_max_docs", + 1000L, + 0L, + Setting.Property.NodeScope, Setting.Property.Dynamic + ); + + public static final Setting FINDING_HISTORY_MAX_DOCS = Setting.longSetting( + "plugins.security_analytics.alert_finding_max_docs", + 1000L, + 0L, + Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated + ); + + public static final Setting ALERT_HISTORY_RETENTION_PERIOD = Setting.positiveTimeSetting( + "plugins.security_analytics.alert_history_retention_period", + new TimeValue(60, TimeUnit.DAYS), + Setting.Property.NodeScope, Setting.Property.Dynamic + ); + + public static final Setting FINDING_HISTORY_RETENTION_PERIOD = Setting.positiveTimeSetting( + "plugins.security_analytics.finding_history_retention_period", + new TimeValue(60, TimeUnit.DAYS), + Setting.Property.NodeScope, Setting.Property.Dynamic + ); + + public static final Setting REQUEST_TIMEOUT = Setting.positiveTimeSetting( + "plugins.security_analytics.request_timeout", + TimeValue.timeValueSeconds(10), + Setting.Property.NodeScope, Setting.Property.Dynamic + ); + + public static final Setting MAX_ACTION_THROTTLE_VALUE = Setting.positiveTimeSetting( + "plugins.security_analytics.action_throttle_max_value", + TimeValue.timeValueHours(24), + Setting.Property.NodeScope, Setting.Property.Dynamic + ); + + public static final Setting FILTER_BY_BACKEND_ROLES = Setting.boolSetting( + "plugins.security_analytics.filter_by_backend_roles", + false, + Setting.Property.NodeScope, Setting.Property.Dynamic + ); + + public static final Setting IS_CORRELATION_INDEX_SETTING = Setting.boolSetting(CORRELATION_INDEX, false, Setting.Property.IndexScope); + + public static final Setting CORRELATION_TIME_WINDOW = Setting.positiveTimeSetting( + "plugins.security_analytics.correlation_time_window", + new TimeValue(5, TimeUnit.MINUTES), + Setting.Property.NodeScope, Setting.Property.Dynamic + ); } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/transport/SecureTransportAction.java b/src/main/java/org/opensearch/securityanalytics/transport/SecureTransportAction.java new file mode 100644 index 000000000..fc48b9a1f --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/transport/SecureTransportAction.java @@ -0,0 +1,126 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.transport; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + + +import org.apache.lucene.search.join.ScoreMode; +import org.opensearch.commons.ConfigConstants; +import org.opensearch.commons.authuser.User; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.TermQueryBuilder; +import org.opensearch.index.query.TermsQueryBuilder; +import org.opensearch.index.query.MatchQueryBuilder; + + +import java.util.List; +import java.util.stream.Collectors; + +/** + * TransportAction classes extend this interface to add filter-by-backend-roles functionality. + * + * 1. If filterBy is enabled + * a) Don't allow to create detector (throw error) if the logged-on user has no backend roles configured. + * + * 2. If filterBy is enabled and detector are created when filterBy is disabled: + * a) If backend_roles are saved with config, results will get filtered and data is shown + * b) If backend_roles are not saved with detector config, results will get filtered and no detectors + * will be displayed. + * c) Users can edit and save the detector to associate their backend_roles. + * + */ +public interface SecureTransportAction { + + static final Logger log = LogManager.getLogger(SecureTransportAction.class); + + /** + * reads the user from the thread context that is later used to serialize and save in config + */ + default User readUserFromThreadContext(ThreadPool threadPool) { + String userStr = threadPool.getThreadContext().getTransient(ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT); + log.info("User and roles string from thread context: {}", userStr); + return User.parse(userStr); + } + + default boolean doFilterForUser(User user, boolean filterByEnabled ) { + log.debug("Is filterByEnabled: {} ; Is admin user: {}", filterByEnabled, isAdmin(user)); + if (isAdmin(user)) { + return false; + } else { + return filterByEnabled; + } + } + + /** + * 'all_access' role users are treated as admins. + */ + default boolean isAdmin(User user) { + if (user == null) { + return false; + } + if (user.getRoles().size() == 0) { + return false; + } + return user.getRoles().contains("all_access"); + } + + default String validateUserBackendRoles(User user, boolean filterByEnabled) { + if (filterByEnabled) { + if (user == null) { + return "Filter by user backend roles is enabled with security disabled."; + } else if (isAdmin(user)) { + return ""; + } else if (user.getBackendRoles().size() == 0) { + return "User doesn't have backend roles configured. Contact administrator"; + } + } + return ""; + } + + /** + * If FilterBy is enabled, this function verifies that the requester user has FilterBy permissions to access + * the resource. If FilterBy is disabled, we will assume the user has permissions and return true. + * + * This check will later to moved to the security plugin. + */ + default boolean checkUserPermissionsWithResource( + User requesterUser, + User resourceUser, + String resourceType, + String resourceId, + boolean filterByEnabled + ) { + + if (!doFilterForUser(requesterUser, filterByEnabled)) return true; + + List resourceBackendRoles = resourceUser.getBackendRoles(); + List requesterBackendRoles = requesterUser.getBackendRoles(); + + if (resourceBackendRoles == null ||requesterBackendRoles == null || isIntersectListsEmpty(resourceBackendRoles, requesterBackendRoles)) { + return false; + } + return true; + } + + + default boolean isIntersectListsEmpty(List a, List b) { + return (a.stream() + .distinct() + .filter(b::contains) + .collect(Collectors.toSet()).size()==0); + } + + default void addFilter(User user,SearchSourceBuilder searchSourceBuilder,String fieldName) { + BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery().must(searchSourceBuilder.query()); + boolQueryBuilder.filter(QueryBuilders.nestedQuery("detector", QueryBuilders.termsQuery(fieldName, user.getBackendRoles()), ScoreMode.Avg)); + searchSourceBuilder.query(boolQueryBuilder); + } + +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportAcknowledgeAlertsAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportAcknowledgeAlertsAction.java index c56ce686c..6535901a8 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportAcknowledgeAlertsAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportAcknowledgeAlertsAction.java @@ -7,15 +7,20 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.OpenSearchException; +import org.opensearch.OpenSearchStatusException; import org.opensearch.action.ActionListener; import org.opensearch.action.StepListener; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; import org.opensearch.client.Client; +import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.inject.Inject; -import org.opensearch.common.xcontent.NamedXContentRegistry; +import org.opensearch.common.settings.Settings; import org.opensearch.commons.alerting.action.GetAlertsResponse; import org.opensearch.commons.alerting.model.Table; +import org.opensearch.commons.authuser.User; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.rest.RestStatus; import org.opensearch.securityanalytics.action.AckAlertsRequest; import org.opensearch.securityanalytics.action.AckAlertsResponse; import org.opensearch.securityanalytics.action.AckAlertsAction; @@ -23,28 +28,51 @@ import org.opensearch.securityanalytics.action.GetDetectorResponse; import org.opensearch.securityanalytics.alerts.AlertsService; import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; import org.opensearch.tasks.Task; +import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; -public class TransportAcknowledgeAlertsAction extends HandledTransportAction { +public class TransportAcknowledgeAlertsAction extends HandledTransportAction implements SecureTransportAction { private final TransportGetDetectorAction transportGetDetectorAction; private final NamedXContentRegistry xContentRegistry; + private final ClusterService clusterService; + + private final Settings settings; + + private final ThreadPool threadPool; private final AlertsService alertsService; + private volatile Boolean filterByEnabled; + private static final Logger log = LogManager.getLogger(TransportAcknowledgeAlertsAction.class); @Inject - public TransportAcknowledgeAlertsAction(TransportService transportService, ActionFilters actionFilters, TransportGetDetectorAction transportGetDetectorAction, NamedXContentRegistry xContentRegistry, Client client) { + public TransportAcknowledgeAlertsAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService, ThreadPool threadPool, Settings settings, TransportGetDetectorAction transportGetDetectorAction, NamedXContentRegistry xContentRegistry, Client client) { super(AckAlertsAction.NAME, transportService, actionFilters, AckAlertsRequest::new); this.transportGetDetectorAction = transportGetDetectorAction; this.xContentRegistry = xContentRegistry; + this.clusterService = clusterService; + this.threadPool = threadPool; + this.settings = settings; + this.filterByEnabled = SecurityAnalyticsSettings.FILTER_BY_BACKEND_ROLES.get(this.settings); this.alertsService = new AlertsService(client); + this.clusterService.getClusterSettings().addSettingsUpdateConsumer(SecurityAnalyticsSettings.FILTER_BY_BACKEND_ROLES, this::setFilterByEnabled); } @Override protected void doExecute(Task task, AckAlertsRequest request, ActionListener actionListener) { + + User user = readUserFromThreadContext(this.threadPool); + + String validateBackendRoleMessage = validateUserBackendRoles(user, this.filterByEnabled); + if (!"".equals(validateBackendRoleMessage)) { + actionListener.onFailure(new OpenSearchStatusException("Do not have permissions to resource", RestStatus.FORBIDDEN)); + return; + } + GetDetectorRequest getDetectorRequest = new GetDetectorRequest(request.getDetectorId(), -3L); transportGetDetectorAction.doExecute(task, getDetectorRequest, new ActionListener() { @Override @@ -76,4 +104,8 @@ private boolean isDetectorAlertsMonitorMismatch(Detector detector, GetAlertsResp return getAlertsResponse.getAlerts().stream() .anyMatch(alert -> false == detector.getMonitorIds().contains(alert.getMonitorId())) ; } + + private void setFilterByEnabled(boolean filterByEnabled) { + this.filterByEnabled = filterByEnabled; + } } diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportCorrelateFindingAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportCorrelateFindingAction.java new file mode 100644 index 000000000..8aa3b5cfd --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportCorrelateFindingAction.java @@ -0,0 +1,403 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.transport; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.search.join.ScoreMode; +import org.opensearch.OpenSearchStatusException; +import org.opensearch.action.ActionListener; +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRunnable; +import org.opensearch.action.admin.indices.create.CreateIndexResponse; +import org.opensearch.action.bulk.BulkResponse; +import org.opensearch.action.index.IndexRequest; +import org.opensearch.action.index.IndexResponse; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.HandledTransportAction; +import org.opensearch.action.support.WriteRequest; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.client.Client; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.inject.Inject; +import org.opensearch.common.io.stream.InputStreamStreamInput; +import org.opensearch.common.io.stream.OutputStreamStreamOutput; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.commons.alerting.model.Finding; +import org.opensearch.commons.alerting.action.PublishFindingsRequest; +import org.opensearch.commons.alerting.action.SubscribeFindingsResponse; +import org.opensearch.commons.alerting.action.AlertingActions; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.NestedQueryBuilder; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.rest.RestStatus; +import org.opensearch.search.SearchHit; +import org.opensearch.search.SearchHits; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.securityanalytics.correlation.JoinEngine; +import org.opensearch.securityanalytics.correlation.VectorEmbeddingsEngine; +import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; +import org.opensearch.securityanalytics.util.CorrelationIndices; +import org.opensearch.securityanalytics.util.DetectorIndices; +import org.opensearch.securityanalytics.util.IndexUtils; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; +import org.opensearch.tasks.Task; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; + +public class TransportCorrelateFindingAction extends HandledTransportAction implements SecureTransportAction { + + private static final Logger log = LogManager.getLogger(TransportCorrelateFindingAction.class); + + private final DetectorIndices detectorIndices; + + private final CorrelationIndices correlationIndices; + + private final ClusterService clusterService; + + private final Settings settings; + + private final Client client; + + private final NamedXContentRegistry xContentRegistry; + + private final ThreadPool threadPool; + + private volatile TimeValue indexTimeout; + + private volatile long corrTimeWindow; + + private volatile long setupTimestamp; + + @Inject + public TransportCorrelateFindingAction(TransportService transportService, + Client client, + NamedXContentRegistry xContentRegistry, + DetectorIndices detectorIndices, + CorrelationIndices correlationIndices, + ClusterService clusterService, + Settings settings, + ActionFilters actionFilters) { + super(AlertingActions.SUBSCRIBE_FINDINGS_ACTION_NAME, transportService, actionFilters, PublishFindingsRequest::new); + this.client = client; + this.xContentRegistry = xContentRegistry; + this.detectorIndices = detectorIndices; + this.correlationIndices = correlationIndices; + this.clusterService = clusterService; + this.settings = settings; + this.threadPool = this.detectorIndices.getThreadPool(); + + this.indexTimeout = SecurityAnalyticsSettings.INDEX_TIMEOUT.get(this.settings); + this.corrTimeWindow = SecurityAnalyticsSettings.CORRELATION_TIME_WINDOW.get(this.settings).getMillis(); + this.clusterService.getClusterSettings().addSettingsUpdateConsumer(SecurityAnalyticsSettings.INDEX_TIMEOUT, it -> indexTimeout = it); + this.clusterService.getClusterSettings().addSettingsUpdateConsumer(SecurityAnalyticsSettings.CORRELATION_TIME_WINDOW, it -> corrTimeWindow = it.getMillis()); + this.setupTimestamp = System.currentTimeMillis(); + } + + @Override + protected void doExecute(Task task, ActionRequest request, ActionListener actionListener) { + try { + log.info("hit here1"); + PublishFindingsRequest transformedRequest = transformRequest(request); + + if (!this.correlationIndices.correlationIndexExists()) { + try { + this.correlationIndices.initCorrelationIndex(new ActionListener<>() { + @Override + public void onResponse(CreateIndexResponse response) { + if (response.isAcknowledged()) { + IndexUtils.correlationIndexUpdated(); + correlationIndices.setupCorrelationIndex(indexTimeout, setupTimestamp, new ActionListener() { + @Override + public void onResponse(BulkResponse response) { + if (response.hasFailures()) { + log.error(new OpenSearchStatusException(response.toString(), RestStatus.INTERNAL_SERVER_ERROR)); + } + + AsyncCorrelateFindingAction correlateFindingAction = new AsyncCorrelateFindingAction(task, transformedRequest, actionListener); + correlateFindingAction.start(); + } + + @Override + public void onFailure(Exception e) { + log.error(e); + } + }); + } else { + log.error(new OpenSearchStatusException("Failed to create correlation Index", RestStatus.INTERNAL_SERVER_ERROR)); + } + } + + @Override + public void onFailure(Exception e) { + log.error(e); + } + }); + } catch (IOException ex) { + log.error(ex); + } + } else { + log.info("hit here2"); + AsyncCorrelateFindingAction correlateFindingAction = new AsyncCorrelateFindingAction(task, transformedRequest, actionListener); + correlateFindingAction.start(); + } + } catch (IOException e) { + throw new SecurityAnalyticsException("Unknown exception occurred", RestStatus.INTERNAL_SERVER_ERROR, e); + } + } + + public class AsyncCorrelateFindingAction { + private final PublishFindingsRequest request; + private final JoinEngine joinEngine; + private final VectorEmbeddingsEngine vectorEmbeddingsEngine; + + private final ActionListener listener; + private final AtomicReference response; + private final AtomicBoolean counter = new AtomicBoolean(); + private final Task task; + + AsyncCorrelateFindingAction(Task task, PublishFindingsRequest request, ActionListener listener) { + this.task = task; + this.request = request; + this.listener = listener; + + this.response =new AtomicReference<>(); + + this.joinEngine = new JoinEngine(client, request, xContentRegistry, corrTimeWindow, this); + this.vectorEmbeddingsEngine = new VectorEmbeddingsEngine(client, indexTimeout, corrTimeWindow, this); + log.info("hit here5"); + } + + void start() { + log.info("hit here4"); + TransportCorrelateFindingAction.this.threadPool.getThreadContext().stashContext(); + String monitorId = request.getMonitorId(); + Finding finding = request.getFinding(); + + if (detectorIndices.detectorIndexExists()) { + NestedQueryBuilder queryBuilder = + QueryBuilders.nestedQuery( + "detector", + QueryBuilders.matchQuery( + "detector.monitor_id", + monitorId + ), + ScoreMode.None + ); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.query(queryBuilder); + searchSourceBuilder.fetchSource(true); + searchSourceBuilder.size(1); + SearchRequest searchRequest = new SearchRequest(); + searchRequest.indices(Detector.DETECTORS_INDEX); + searchRequest.source(searchSourceBuilder); + + client.search(searchRequest, new ActionListener<>() { + @Override + public void onResponse(SearchResponse response) { + if (response.isTimedOut()) { + onFailures(new OpenSearchStatusException(response.toString(), RestStatus.REQUEST_TIMEOUT)); + } + + SearchHits hits = response.getHits(); + // Detectors Index hits count could be more even if we fetch one + if (hits.getTotalHits().value >= 1 && hits.getHits().length > 0) { + try { + SearchHit hit = hits.getAt(0); + + XContentParser xcp = XContentType.JSON.xContent().createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, hit.getSourceAsString() + ); + Detector detector = Detector.docParse(xcp, hit.getId(), hit.getVersion()); + joinEngine.onSearchDetectorResponse(detector, finding); + } catch (IOException e) { + log.error("IOException for request {}", searchRequest.toString(), e); + onFailures(e); + } + } else { + onFailures(new OpenSearchStatusException("detector not found given monitor id", RestStatus.INTERNAL_SERVER_ERROR)); + } + } + + @Override + public void onFailure(Exception e) { + onFailures(e); + } + }); + } else { + onFailures(new SecurityAnalyticsException(String.format(Locale.getDefault(), "Detector index %s doesnt exist", Detector.DETECTORS_INDEX), RestStatus.INTERNAL_SERVER_ERROR, new RuntimeException())); + } + } + + public void initCorrelationIndex(String detectorType, Map> correlatedFindings, List correlationRules) { + try { + log.info("hit here6"); + if (!IndexUtils.correlationIndexUpdated) { + IndexUtils.updateIndexMapping( + CorrelationIndices.CORRELATION_INDEX, + CorrelationIndices.correlationMappings(), clusterService.state(), client.admin().indices(), + new ActionListener<>() { + @Override + public void onResponse(AcknowledgedResponse response) { + if (response.isAcknowledged()) { + IndexUtils.correlationIndexUpdated(); + getTimestampFeature(detectorType, correlatedFindings, null, correlationRules); + } else { + onFailures(new OpenSearchStatusException("Failed to create correlation Index", RestStatus.INTERNAL_SERVER_ERROR)); + } + } + + @Override + public void onFailure(Exception e) { + onFailures(e); + } + } + ); + } else { + getTimestampFeature(detectorType, correlatedFindings, null, correlationRules); + } + } catch (IOException ex) { + onFailures(ex); + } + } + + public void getTimestampFeature(String detectorType, Map> correlatedFindings, Finding orphanFinding, List correlationRules) { + log.info("hit here7"); + long findingTimestamp = this.request.getFinding().getTimestamp().toEpochMilli(); + BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery() + .mustNot(QueryBuilders.termQuery("scoreTimestamp", 0L)); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.query(queryBuilder); + searchSourceBuilder.fetchSource(true); + searchSourceBuilder.size(1); + SearchRequest searchRequest = new SearchRequest(); + searchRequest.indices(CorrelationIndices.CORRELATION_INDEX); + searchRequest.source(searchSourceBuilder); + + client.search(searchRequest, new ActionListener<>() { + @Override + public void onResponse(SearchResponse response) { + String id = response.getHits().getHits()[0].getId(); + Map hitSource = response.getHits().getHits()[0].getSourceAsMap(); + long scoreTimestamp = (long) hitSource.get("scoreTimestamp"); + + if (findingTimestamp - CorrelationIndices.FIXED_HISTORICAL_INTERVAL > scoreTimestamp) { + try { + XContentBuilder scoreBuilder = XContentFactory.jsonBuilder().startObject(); + scoreBuilder.field("scoreTimestamp", findingTimestamp - CorrelationIndices.FIXED_HISTORICAL_INTERVAL); + scoreBuilder.field("root", false); + scoreBuilder.endObject(); + + IndexRequest scoreIndexRequest = new IndexRequest(CorrelationIndices.CORRELATION_INDEX) + .id(id) + .source(scoreBuilder) + .timeout(indexTimeout) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + client.index(scoreIndexRequest, new ActionListener<>() { + @Override + public void onResponse(IndexResponse response) { + if (correlatedFindings != null) { + if (correlatedFindings.isEmpty()) { + vectorEmbeddingsEngine.insertOrphanFindings(detectorType, request.getFinding(), Long.valueOf(CorrelationIndices.FIXED_HISTORICAL_INTERVAL / 1000L).floatValue()); + } + for (Map.Entry> correlatedFinding : correlatedFindings.entrySet()) { + vectorEmbeddingsEngine.insertCorrelatedFindings(detectorType, request.getFinding(), correlatedFinding.getKey(), correlatedFinding.getValue(), + Long.valueOf(CorrelationIndices.FIXED_HISTORICAL_INTERVAL / 1000L).floatValue(), correlationRules); + } + } else { + vectorEmbeddingsEngine.insertOrphanFindings(detectorType, orphanFinding, Long.valueOf(CorrelationIndices.FIXED_HISTORICAL_INTERVAL / 1000L).floatValue()); + } + } + + @Override + public void onFailure(Exception e) { + onFailures(e); + } + }); + } catch (Exception ex) { + onFailures(ex); + } + } else { + float timestampFeature = Long.valueOf((findingTimestamp - scoreTimestamp) / 1000L).floatValue(); + if (correlatedFindings != null) { + if (correlatedFindings.isEmpty()) { + vectorEmbeddingsEngine.insertOrphanFindings(detectorType, request.getFinding(), timestampFeature); + } + for (Map.Entry> correlatedFinding : correlatedFindings.entrySet()) { + vectorEmbeddingsEngine.insertCorrelatedFindings(detectorType, request.getFinding(), correlatedFinding.getKey(), correlatedFinding.getValue(), + timestampFeature, correlationRules); + } + } else { + vectorEmbeddingsEngine.insertOrphanFindings(detectorType, orphanFinding, timestampFeature); + } + } + } + + @Override + public void onFailure(Exception e) { + onFailures(e); + } + }); + } + + public void onOperation() { + this.response.set(RestStatus.OK); + if (counter.compareAndSet(false, true)) { + finishHim(null); + } + } + + public void onFailures(Exception t) { + if (counter.compareAndSet(false, true)) { + finishHim(t); + } + } + + private void finishHim(Exception t) { + threadPool.executor(ThreadPool.Names.GENERIC).execute(ActionRunnable.supply(listener, () -> { + if (t != null) { + if (t instanceof OpenSearchStatusException) { + throw t; + } + throw SecurityAnalyticsException.wrap(t); + } else { + return new SubscribeFindingsResponse(RestStatus.OK); + } + })); + } + } + + private PublishFindingsRequest transformRequest(ActionRequest request) throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + OutputStreamStreamOutput osso = new OutputStreamStreamOutput(baos); + request.writeTo(osso); + + ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); + InputStreamStreamInput issi = new InputStreamStreamInput(bais); + return new PublishFindingsRequest(issi); + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportCreateIndexMappingsAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportCreateIndexMappingsAction.java index 4b2c7a384..162c45c16 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportCreateIndexMappingsAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportCreateIndexMappingsAction.java @@ -15,31 +15,34 @@ import org.opensearch.securityanalytics.mapper.MapperService; import org.opensearch.securityanalytics.action.CreateIndexMappingsRequest; import org.opensearch.tasks.Task; +import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; public class TransportCreateIndexMappingsAction extends HandledTransportAction { private MapperService mapperService; private ClusterService clusterService; + private final ThreadPool threadPool; + + @Inject public TransportCreateIndexMappingsAction( TransportService transportService, ActionFilters actionFilters, + ThreadPool threadPool, MapperService mapperService, ClusterService clusterService ) { super(CreateIndexMappingsAction.NAME, transportService, actionFilters, CreateIndexMappingsRequest::new); this.clusterService = clusterService; this.mapperService = mapperService; + this.threadPool = threadPool; } @Override protected void doExecute(Task task, CreateIndexMappingsRequest request, ActionListener actionListener) { - IndexMetadata index = clusterService.state().metadata().index(request.getIndexName()); - if (index == null) { - actionListener.onFailure(new IllegalStateException("Could not find index [" + request.getIndexName() + "]")); - return; - } + this.threadPool.getThreadContext().stashContext(); + mapperService.createMappingAction( request.getIndexName(), request.getRuleTopic(), diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportDeleteCorrelationRuleAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportDeleteCorrelationRuleAction.java new file mode 100644 index 000000000..da6e5c175 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportDeleteCorrelationRuleAction.java @@ -0,0 +1,83 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.securityanalytics.transport; + +import java.util.Locale; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.OpenSearchStatusException; +import org.opensearch.action.ActionListener; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.HandledTransportAction; +import org.opensearch.action.support.WriteRequest; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.client.Client; +import org.opensearch.common.inject.Inject; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.index.reindex.BulkByScrollResponse; +import org.opensearch.index.reindex.DeleteByQueryAction; +import org.opensearch.index.reindex.DeleteByQueryRequestBuilder; +import org.opensearch.rest.RestStatus; +import org.opensearch.securityanalytics.action.DeleteCorrelationRuleAction; +import org.opensearch.securityanalytics.action.DeleteCorrelationRuleRequest; +import org.opensearch.securityanalytics.model.CorrelationRule; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; +import org.opensearch.tasks.Task; +import org.opensearch.transport.TransportService; + +public class TransportDeleteCorrelationRuleAction extends HandledTransportAction { + + private static final Logger log = LogManager.getLogger(TransportDeleteCorrelationRuleAction.class); + + private final Client client; + + @Inject + public TransportDeleteCorrelationRuleAction( + TransportService transportService, + Client client, + ActionFilters actionFilters + ) { + super(DeleteCorrelationRuleAction.NAME, transportService, actionFilters, DeleteCorrelationRuleRequest::new); + this.client = client; + } + + @Override + protected void doExecute(Task task, DeleteCorrelationRuleRequest request, ActionListener listener) { + String correlationRuleId = request.getCorrelationRuleId(); + WriteRequest.RefreshPolicy refreshPolicy = request.getRefreshPolicy(); + log.debug("Deleting Correlation Rule with id: " + correlationRuleId); + + new DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE) + .source(CorrelationRule.CORRELATION_RULE_INDEX) + .filter(QueryBuilders.matchQuery("_id", correlationRuleId)) + .execute(new ActionListener<>() { + @Override + public void onResponse(BulkByScrollResponse response) { + if (response.isTimedOut()) { + listener.onFailure( + new OpenSearchStatusException( + String.format( + Locale.getDefault(), + "Correlation Rule with id %s cannot be deleted", + correlationRuleId + ), + RestStatus.INTERNAL_SERVER_ERROR) + ); + return; + } + listener.onResponse(new AcknowledgedResponse(true)); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(SecurityAnalyticsException.wrap(e)); + } + }); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportDeleteDetectorAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportDeleteDetectorAction.java index 67d168ccb..decf798a9 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportDeleteDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportDeleteDetectorAction.java @@ -4,9 +4,14 @@ */ package org.opensearch.securityanalytics.transport; +import java.util.Collection; +import java.util.List; +import java.util.Locale; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.SetOnce; +import org.opensearch.common.SetOnce; import org.opensearch.OpenSearchStatusException; import org.opensearch.action.ActionListener; import org.opensearch.action.ActionRunnable; @@ -14,40 +19,34 @@ import org.opensearch.action.delete.DeleteResponse; import org.opensearch.action.get.GetRequest; import org.opensearch.action.get.GetResponse; -import org.opensearch.action.search.SearchResponse; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.GroupedActionListener; import org.opensearch.action.support.HandledTransportAction; import org.opensearch.action.support.WriteRequest; -import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.client.Client; import org.opensearch.client.node.NodeClient; import org.opensearch.common.inject.Inject; import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.NamedXContentRegistry; import org.opensearch.common.xcontent.XContentHelper; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.commons.alerting.AlertingPluginInterface; import org.opensearch.commons.alerting.action.DeleteMonitorRequest; import org.opensearch.commons.alerting.action.DeleteMonitorResponse; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.rest.RestStatus; import org.opensearch.securityanalytics.action.DeleteDetectorAction; import org.opensearch.securityanalytics.action.DeleteDetectorRequest; import org.opensearch.securityanalytics.action.DeleteDetectorResponse; +import org.opensearch.securityanalytics.mapper.IndexTemplateManager; import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.util.DetectorIndices; import org.opensearch.securityanalytics.util.RuleTopicIndices; import org.opensearch.securityanalytics.util.SecurityAnalyticsException; import org.opensearch.tasks.Task; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; -import java.io.IOException; -import java.util.Collection; -import java.util.List; -import java.util.Locale; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; import static org.opensearch.securityanalytics.model.Detector.NO_VERSION; @@ -63,18 +62,24 @@ public class TransportDeleteDetectorAction extends HandledTransportAction listener) { - AsyncDeleteDetectorAction asyncAction = new AsyncDeleteDetectorAction(task, request, listener); + AsyncDeleteDetectorAction asyncAction = new AsyncDeleteDetectorAction(task, request, listener, detectorIndices); asyncAction.start(); } @@ -95,17 +100,32 @@ class AsyncDeleteDetectorAction { private final ActionListener listener; private final AtomicReference response; private final AtomicBoolean counter = new AtomicBoolean(); + private final DetectorIndices detectorIndices; private final Task task; - AsyncDeleteDetectorAction(Task task, DeleteDetectorRequest request, ActionListener listener) { + AsyncDeleteDetectorAction( + Task task, + DeleteDetectorRequest request, + ActionListener listener, + DetectorIndices detectorIndices) { this.task = task; this.request = request; this.listener = listener; - this.response = new AtomicReference<>(); + this.detectorIndices = detectorIndices; } void start() { + if (!detectorIndices.detectorIndexExists()) { + onFailures(new OpenSearchStatusException( + String.format(Locale.getDefault(), + "Detector with %s is not found", + request.getDetectorId()), + RestStatus.NOT_FOUND)); + return; + + } + TransportDeleteDetectorAction.this.threadPool.getThreadContext().stashContext(); String detectorId = request.getDetectorId(); GetRequest getRequest = new GetRequest(Detector.DETECTORS_INDEX, detectorId); client.get(getRequest, @@ -129,14 +149,13 @@ public void onResponse(GetResponse response) { @Override public void onFailure(Exception t) { - onFailures(t); + onFailures(new OpenSearchStatusException(String.format(Locale.getDefault(), "Detector with %s is not found", detectorId), RestStatus.NOT_FOUND)); } }); } private void onGetResponse(Detector detector) { List monitorIds = detector.getMonitorIds(); - String ruleIndex = detector.getRuleIndex(); ActionListener deletesListener = new GroupedActionListener<>(new ActionListener<>() { @Override public void onResponse(Collection responses) { @@ -151,54 +170,18 @@ public void onResponse(Collection responses) { }).count() > 0) { onFailures(new OpenSearchStatusException("Monitor associated with detected could not be deleted", errorStatusSupplier.get())); } - ruleTopicIndices.countQueries(ruleIndex, new ActionListener<>() { - @Override - public void onResponse(SearchResponse response) { - if (response.isTimedOut()) { - log.info("Count response timed out"); - deleteDetectorFromConfig(detector.getId(), request.getRefreshPolicy()); - } else { - long count = response.getHits().getTotalHits().value; - - if (count == 0) { - try { - ruleTopicIndices.deleteRuleTopicIndex(ruleIndex, - new ActionListener<>() { - @Override - public void onResponse(AcknowledgedResponse response) { - deleteDetectorFromConfig(detector.getId(), request.getRefreshPolicy()); - } - - @Override - public void onFailure(Exception e) { - // error is suppressed as it is not a critical deletion - log.info(e.getMessage()); - deleteDetectorFromConfig(detector.getId(), request.getRefreshPolicy()); - } - }); - } catch (IOException e) { - deleteDetectorFromConfig(detector.getId(), request.getRefreshPolicy()); - } - } else { - deleteDetectorFromConfig(detector.getId(), request.getRefreshPolicy()); - } - } - } - - @Override - public void onFailure(Exception e) { - // error is suppressed as it is not a critical deletion - log.info(e.getMessage()); - - - } - }); + deleteDetectorFromConfig(detector.getId(), request.getRefreshPolicy()); } @Override public void onFailure(Exception e) { - if (counter.compareAndSet(false, true)) { - finishHim(null, e); + if(isOnlyMonitorOrIndexMissingExceptionThrownByGroupedActionListener(e, detector.getId())) { + deleteDetectorFromConfig(detector.getId(), request.getRefreshPolicy()); + } else { + log.error(String.format(Locale.ROOT, "Failed to delete detector %s", detector.getId()), e); + if (counter.compareAndSet(false, true)) { + finishHim(null, e); + } } } }, monitorIds.size()); @@ -213,9 +196,21 @@ private void deleteDetectorFromConfig(String detectorId, WriteRequest.RefreshPol new ActionListener<>() { @Override public void onResponse(DeleteResponse response) { - onOperation(response); - } + indexTemplateManager.deleteAllUnusedTemplates(new ActionListener() { + @Override + public void onResponse(Void unused) { + onOperation(response); + } + + @Override + public void onFailure(Exception e) { + log.error("Error deleting unused templates: " + e.getMessage()); + onOperation(response); + } + }); + + } @Override public void onFailure(Exception t) { onFailures(t); @@ -231,6 +226,7 @@ private void onOperation(DeleteResponse response) { } private void onFailures(Exception t) { + log.error(String.format(Locale.ROOT, "Failed to delete detector")); if (counter.compareAndSet(false, true)) { finishHim(null, t); } @@ -239,11 +235,39 @@ private void onFailures(Exception t) { private void finishHim(String detectorId, Exception t) { threadPool.executor(ThreadPool.Names.GENERIC).execute(ActionRunnable.supply(listener, () -> { if (t != null) { + log.error(String.format(Locale.ROOT, "Failed to delete detector %s",detectorId), t); + if (t instanceof OpenSearchStatusException) { + throw t; + } throw SecurityAnalyticsException.wrap(t); } else { return new DeleteDetectorResponse(detectorId, NO_VERSION, RestStatus.NO_CONTENT); } })); } + + private boolean isOnlyMonitorOrIndexMissingExceptionThrownByGroupedActionListener( + Exception ex, + String detectorId + ) { + // grouped action listener listens on mutliple listeners but throws only one exception. If multiple + // listeners fail the other exceptions are added as suppressed exceptions to the first failure. + int len = ex.getSuppressed().length; + for (int i = 0; i <= len; i++) { + Throwable e = i == len ? ex : ex.getSuppressed()[i]; + if (e.getMessage().matches("(.*)Monitor(.*) is not found(.*)") + || e.getMessage().contains( + "Configured indices are not found: [.opendistro-alerting-config]") + ) { + log.error( + String.format(Locale.ROOT, "Monitor or jobs index already deleted." + + " Proceeding with detector %s deletion", detectorId), + e); + } else { + return false; + } + } + return true; + } } } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportDeleteRuleAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportDeleteRuleAction.java index bec1ab5ff..639f8c823 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportDeleteRuleAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportDeleteRuleAction.java @@ -19,10 +19,10 @@ import org.opensearch.client.Client; import org.opensearch.common.inject.Inject; import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.NamedXContentRegistry; import org.opensearch.common.xcontent.XContentHelper; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.reindex.BulkByScrollResponse; @@ -126,7 +126,7 @@ public void onResponse(GetResponse response) { @Override public void onFailure(Exception e) { - onFailures(e); + onFailures(new OpenSearchStatusException(String.format(Locale.getDefault(), "Rule with %s is not found", ruleId), RestStatus.NOT_FOUND)); } }); } @@ -273,6 +273,9 @@ private void onFailures(Exception t) { private void finishHim(String ruleId, Exception t) { threadPool.executor(ThreadPool.Names.GENERIC).execute(ActionRunnable.supply(listener, () -> { if (t != null) { + if (t instanceof OpenSearchStatusException) { + throw t; + } throw SecurityAnalyticsException.wrap(t); } else { return new DeleteRuleResponse(ruleId, NO_VERSION, RestStatus.NO_CONTENT); diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportGetAlertsAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportGetAlertsAction.java index 454eb09d4..bfc73ac97 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportGetAlertsAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportGetAlertsAction.java @@ -10,16 +10,21 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.search.join.ScoreMode; +import org.opensearch.OpenSearchStatusException; import org.opensearch.action.ActionListener; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; import org.opensearch.client.Client; +import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.inject.Inject; -import org.opensearch.common.xcontent.NamedXContentRegistry; +import org.opensearch.common.settings.Settings; +import org.opensearch.commons.authuser.User; +import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.index.query.NestedQueryBuilder; import org.opensearch.index.query.QueryBuilders; +import org.opensearch.rest.RestStatus; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.securityanalytics.action.GetAlertsAction; import org.opensearch.securityanalytics.action.GetAlertsRequest; @@ -27,33 +32,58 @@ import org.opensearch.securityanalytics.action.SearchDetectorRequest; import org.opensearch.securityanalytics.alerts.AlertsService; import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; import org.opensearch.securityanalytics.util.DetectorUtils; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; import org.opensearch.tasks.Task; +import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; import static org.opensearch.securityanalytics.util.DetectorUtils.DETECTOR_TYPE_PATH; -public class TransportGetAlertsAction extends HandledTransportAction { +public class TransportGetAlertsAction extends HandledTransportAction implements SecureTransportAction { private final TransportSearchDetectorAction transportSearchDetectorAction; private final NamedXContentRegistry xContentRegistry; + private final ClusterService clusterService; + + private final Settings settings; + + private final ThreadPool threadPool; + private final AlertsService alertsService; + private volatile Boolean filterByEnabled; + private static final Logger log = LogManager.getLogger(TransportGetAlertsAction.class); @Inject - public TransportGetAlertsAction(TransportService transportService, ActionFilters actionFilters, TransportSearchDetectorAction transportSearchDetectorAction, NamedXContentRegistry xContentRegistry, Client client) { + public TransportGetAlertsAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService, TransportSearchDetectorAction transportSearchDetectorAction, ThreadPool threadPool, Settings settings, NamedXContentRegistry xContentRegistry, Client client) { super(GetAlertsAction.NAME, transportService, actionFilters, GetAlertsRequest::new); this.transportSearchDetectorAction = transportSearchDetectorAction; this.xContentRegistry = xContentRegistry; this.alertsService = new AlertsService(client); + this.clusterService = clusterService; + this.threadPool = threadPool; + this.settings = settings; + this.filterByEnabled = SecurityAnalyticsSettings.FILTER_BY_BACKEND_ROLES.get(this.settings); + this.clusterService.getClusterSettings().addSettingsUpdateConsumer(SecurityAnalyticsSettings.FILTER_BY_BACKEND_ROLES, this::setFilterByEnabled); } @Override protected void doExecute(Task task, GetAlertsRequest request, ActionListener actionListener) { + + User user = readUserFromThreadContext(this.threadPool); + + String validateBackendRoleMessage = validateUserBackendRoles(user, this.filterByEnabled); + if (!"".equals(validateBackendRoleMessage)) { + actionListener.onFailure(new OpenSearchStatusException("Do not have permissions to resource", RestStatus.FORBIDDEN)); + return; + } + if (request.getDetectorType() == null) { alertsService.getAlertsByDetectorId( request.getDetectorId(), @@ -70,7 +100,7 @@ protected void doExecute(Task task, GetAlertsRequest request, ActionListener detectors = DetectorUtils.getDetectors(searchResponse, xContentRegistry); + if (detectors.size() == 0) { + actionListener.onFailure( + SecurityAnalyticsException.wrap( + new OpenSearchStatusException( + "No detectors found for provided type", RestStatus.NOT_FOUND + ) + ) + ); + return; + } alertsService.getAlerts( detectors, request.getDetectorType(), @@ -109,4 +149,7 @@ public void onFailure(Exception e) { } } + private void setFilterByEnabled(boolean filterByEnabled) { + this.filterByEnabled = filterByEnabled; + } } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportGetAllRuleCategoriesAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportGetAllRuleCategoriesAction.java new file mode 100644 index 000000000..34ff899bb --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportGetAllRuleCategoriesAction.java @@ -0,0 +1,41 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.transport; + +import org.opensearch.action.ActionListener; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.HandledTransportAction; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.inject.Inject; +import org.opensearch.securityanalytics.action.GetAllRuleCategoriesAction; +import org.opensearch.securityanalytics.action.GetAllRuleCategoriesRequest; +import org.opensearch.securityanalytics.action.GetAllRuleCategoriesResponse; +import org.opensearch.securityanalytics.model.RuleCategory; +import org.opensearch.tasks.Task; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +public class TransportGetAllRuleCategoriesAction extends HandledTransportAction { + + private final ThreadPool threadPool; + + @Inject + public TransportGetAllRuleCategoriesAction( + TransportService transportService, + ActionFilters actionFilters, + GetAllRuleCategoriesAction getAllRuleCategoriesAction, + ClusterService clusterService, + ThreadPool threadPool + ) { + super(getAllRuleCategoriesAction.NAME, transportService, actionFilters, GetAllRuleCategoriesRequest::new); + this.threadPool = threadPool; + } + + @Override + protected void doExecute(Task task, GetAllRuleCategoriesRequest request, ActionListener actionListener) { + this.threadPool.getThreadContext().stashContext(); + actionListener.onResponse(new GetAllRuleCategoriesResponse(RuleCategory.ALL_RULE_CATEGORIES)); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportGetDetectorAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportGetDetectorAction.java index 08aa224bb..47c2e07e5 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportGetDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportGetDetectorAction.java @@ -14,28 +14,29 @@ import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; import org.opensearch.client.Client; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.commons.authuser.User; import org.opensearch.common.inject.Inject; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.rest.RestResponse; import org.opensearch.rest.BytesRestResponse; - import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; - +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.rest.RestStatus; import org.opensearch.securityanalytics.action.GetDetectorAction; import org.opensearch.securityanalytics.model.Detector; import org.opensearch.securityanalytics.model.DetectorInput; - import org.opensearch.securityanalytics.action.GetDetectorRequest; import org.opensearch.securityanalytics.action.GetDetectorResponse; - +import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; import org.opensearch.securityanalytics.util.SecurityAnalyticsException; +import org.opensearch.securityanalytics.util.DetectorIndices; import org.opensearch.tasks.Task; +import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; import java.io.IOException; @@ -44,25 +45,52 @@ import static org.opensearch.rest.RestStatus.OK; -public class TransportGetDetectorAction extends HandledTransportAction { +public class TransportGetDetectorAction extends HandledTransportAction implements SecureTransportAction { private final Client client; private final NamedXContentRegistry xContentRegistry; + private final DetectorIndices detectorIndices; + + private final ClusterService clusterService; + + private final Settings settings; + + private final ThreadPool threadPool; + + private volatile Boolean filterByEnabled; + private static final Logger log = LogManager.getLogger(TransportGetDetectorAction.class); @Inject - public TransportGetDetectorAction(TransportService transportService, ActionFilters actionFilters, NamedXContentRegistry xContentRegistry, Client client) { + public TransportGetDetectorAction(TransportService transportService, ActionFilters actionFilters, DetectorIndices detectorIndices, ClusterService clusterService, NamedXContentRegistry xContentRegistry, Client client, Settings settings) { super(GetDetectorAction.NAME, transportService, actionFilters, GetDetectorRequest::new); this.xContentRegistry = xContentRegistry; this.client = client; + this.detectorIndices = detectorIndices; + this.clusterService = clusterService; + this.threadPool = this.detectorIndices.getThreadPool(); + this.settings = settings; + this.filterByEnabled = SecurityAnalyticsSettings.FILTER_BY_BACKEND_ROLES.get(this.settings); + + this.clusterService.getClusterSettings().addSettingsUpdateConsumer(SecurityAnalyticsSettings.FILTER_BY_BACKEND_ROLES, this::setFilterByEnabled); } @Override protected void doExecute(Task task, GetDetectorRequest request, ActionListener actionListener) { + User user = readUserFromThreadContext(this.threadPool); + + String validateBackendRoleMessage = validateUserBackendRoles(user, this.filterByEnabled); + if (!"".equals(validateBackendRoleMessage)) { + actionListener.onFailure(new OpenSearchStatusException("Do not have permissions to resource", RestStatus.FORBIDDEN)); + return; + } + + this.threadPool.getThreadContext().stashContext(); + GetRequest getRequest = new GetRequest(Detector.DETECTORS_INDEX, request.getDetectorId()) .version(request.getVersion()); @@ -81,8 +109,21 @@ public void onResponse(GetResponse response) { response.getSourceAsBytesRef(), XContentType.JSON ); detector = Detector.docParse(xcp, response.getId(), response.getVersion()); + assert detector != null; + // security is enabled and filterby is enabled + if (!checkUserPermissionsWithResource( + user, + detector.getUser(), + "detector", + detector.getId(), + TransportGetDetectorAction.this.filterByEnabled + ) + ) { + actionListener.onFailure(new OpenSearchStatusException("Do not have permissions to resource", RestStatus.FORBIDDEN)); + return; + } } - assert detector != null; + actionListener.onResponse(new GetDetectorResponse(detector.getId(), detector.getVersion(), OK, detector)); } catch (IOException ex) { actionListener.onFailure(ex); @@ -96,4 +137,8 @@ public void onFailure(Exception e) { }); } + private void setFilterByEnabled(boolean filterByEnabled) { + this.filterByEnabled = filterByEnabled; + } + } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportGetFindingsAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportGetFindingsAction.java index 35fff632c..4ec9adeb5 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportGetFindingsAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportGetFindingsAction.java @@ -10,16 +10,21 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.search.join.ScoreMode; +import org.opensearch.OpenSearchStatusException; import org.opensearch.action.ActionListener; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; import org.opensearch.client.Client; +import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.inject.Inject; -import org.opensearch.common.xcontent.NamedXContentRegistry; +import org.opensearch.common.settings.Settings; +import org.opensearch.commons.authuser.User; +import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.index.query.NestedQueryBuilder; import org.opensearch.index.query.QueryBuilders; +import org.opensearch.rest.RestStatus; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.securityanalytics.action.GetFindingsAction; import org.opensearch.securityanalytics.action.GetFindingsRequest; @@ -27,14 +32,18 @@ import org.opensearch.securityanalytics.action.SearchDetectorRequest; import org.opensearch.securityanalytics.findings.FindingsService; import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; +import org.opensearch.securityanalytics.util.DetectorIndices; import org.opensearch.securityanalytics.util.DetectorUtils; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; import org.opensearch.tasks.Task; +import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; import static org.opensearch.securityanalytics.util.DetectorUtils.DETECTOR_TYPE_PATH; -public class TransportGetFindingsAction extends HandledTransportAction { +public class TransportGetFindingsAction extends HandledTransportAction implements SecureTransportAction { private final TransportSearchDetectorAction transportSearchDetectorAction; @@ -42,25 +51,50 @@ public class TransportGetFindingsAction extends HandledTransportAction actionListener) { + + User user = readUserFromThreadContext(this.threadPool); + + String validateBackendRoleMessage = validateUserBackendRoles(user, this.filterByEnabled); + if (!"".equals(validateBackendRoleMessage)) { + actionListener.onFailure(new OpenSearchStatusException("Do not have permissions to resource", RestStatus.FORBIDDEN)); + return; + } + if (request.getDetectorType() == null) { findingsService.getFindingsByDetectorId( request.getDetectorId(), request.getTable(), actionListener - ); + ); } else { // "detector" is nested type so we have to use nested query NestedQueryBuilder queryBuilder = @@ -69,7 +103,7 @@ protected void doExecute(Task task, GetFindingsRequest request, ActionListener detectors = DetectorUtils.getDetectors(searchResponse, xContentRegistry); + if (detectors.size() == 0) { + actionListener.onFailure( + SecurityAnalyticsException.wrap( + new OpenSearchStatusException( + "No detectors found for provided type", RestStatus.NOT_FOUND + ) + ) + ); + return; + } findingsService.getFindings( detectors, request.getDetectorType(), @@ -105,4 +149,8 @@ public void onFailure(Exception e) { } } + private void setFilterByEnabled(boolean filterByEnabled) { + this.filterByEnabled = filterByEnabled; + } + } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportGetIndexMappingsAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportGetIndexMappingsAction.java index b9d790410..b8b9110d8 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportGetIndexMappingsAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportGetIndexMappingsAction.java @@ -4,43 +4,48 @@ */ package org.opensearch.securityanalytics.transport; +import org.opensearch.OpenSearchStatusException; import org.opensearch.action.ActionListener; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.inject.Inject; +import org.opensearch.rest.RestStatus; import org.opensearch.securityanalytics.action.GetIndexMappingsAction; import org.opensearch.securityanalytics.mapper.MapperService; import org.opensearch.securityanalytics.action.GetIndexMappingsRequest; import org.opensearch.securityanalytics.action.GetIndexMappingsResponse; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; import org.opensearch.tasks.Task; +import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; public class TransportGetIndexMappingsAction extends HandledTransportAction { private MapperService mapperService; private ClusterService clusterService; + private final ThreadPool threadPool; + @Inject public TransportGetIndexMappingsAction( TransportService transportService, ActionFilters actionFilters, GetIndexMappingsAction getIndexMappingsAction, MapperService mapperService, - ClusterService clusterService + ClusterService clusterService, + ThreadPool threadPool ) { super(getIndexMappingsAction.NAME, transportService, actionFilters, GetIndexMappingsRequest::new); this.clusterService = clusterService; this.mapperService = mapperService; + this.threadPool = threadPool; } @Override protected void doExecute(Task task, GetIndexMappingsRequest request, ActionListener actionListener) { - IndexMetadata index = clusterService.state().metadata().index(request.getIndexName()); - if (index == null) { - actionListener.onFailure(new IllegalStateException("Could not find index [" + request.getIndexName() + "]")); - return; - } + this.threadPool.getThreadContext().stashContext(); + mapperService.getMappingAction(request.getIndexName(), actionListener); } } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportGetMappingsViewAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportGetMappingsViewAction.java index bf71bd312..319bc5bca 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportGetMappingsViewAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportGetMappingsViewAction.java @@ -4,12 +4,14 @@ */ package org.opensearch.securityanalytics.transport; +import org.opensearch.OpenSearchStatusException; import org.opensearch.action.ActionListener; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.inject.Inject; +import org.opensearch.rest.RestStatus; import org.opensearch.securityanalytics.action.GetIndexMappingsAction; import org.opensearch.securityanalytics.action.GetIndexMappingsRequest; import org.opensearch.securityanalytics.action.GetIndexMappingsResponse; @@ -17,12 +19,15 @@ import org.opensearch.securityanalytics.action.GetMappingsViewRequest; import org.opensearch.securityanalytics.action.GetMappingsViewResponse; import org.opensearch.securityanalytics.mapper.MapperService; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; import org.opensearch.tasks.Task; +import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; public class TransportGetMappingsViewAction extends HandledTransportAction { private MapperService mapperService; private ClusterService clusterService; + private final ThreadPool threadPool; @Inject public TransportGetMappingsViewAction( @@ -30,20 +35,18 @@ public TransportGetMappingsViewAction( ActionFilters actionFilters, GetMappingsViewAction getMappingsViewAction, MapperService mapperService, - ClusterService clusterService + ClusterService clusterService, + ThreadPool threadPool ) { super(getMappingsViewAction.NAME, transportService, actionFilters, GetMappingsViewRequest::new); this.clusterService = clusterService; this.mapperService = mapperService; + this.threadPool = threadPool; } @Override protected void doExecute(Task task, GetMappingsViewRequest request, ActionListener actionListener) { - IndexMetadata index = clusterService.state().metadata().index(request.getIndexName()); - if (index == null) { - actionListener.onFailure(new IllegalStateException("Could not find index [" + request.getIndexName() + "]")); - return; - } - mapperService.getMappingsViewAction(request.getIndexName(), request.getRuleTopic(), actionListener); + this.threadPool.getThreadContext().stashContext(); + this.mapperService.getMappingsViewAction(request.getIndexName(), request.getRuleTopic(), actionListener); } } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexCorrelationRuleAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexCorrelationRuleAction.java new file mode 100644 index 000000000..defa51faa --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexCorrelationRuleAction.java @@ -0,0 +1,219 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.securityanalytics.transport; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.OpenSearchStatusException; +import org.opensearch.action.ActionListener; +import org.opensearch.action.admin.indices.create.CreateIndexResponse; +import org.opensearch.action.index.IndexRequest; +import org.opensearch.action.index.IndexResponse; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.HandledTransportAction; +import org.opensearch.action.support.WriteRequest; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.client.Client; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.inject.Inject; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.RestStatus; +import org.opensearch.securityanalytics.action.IndexCorrelationRuleAction; +import org.opensearch.securityanalytics.action.IndexCorrelationRuleRequest; +import org.opensearch.securityanalytics.action.IndexCorrelationRuleResponse; +import org.opensearch.securityanalytics.model.CorrelationRule; +import org.opensearch.securityanalytics.util.CorrelationRuleIndices; +import org.opensearch.securityanalytics.util.IndexUtils; +import org.opensearch.tasks.Task; +import org.opensearch.transport.TransportService; + +import java.io.IOException; +import java.util.Locale; + +public class TransportIndexCorrelationRuleAction extends HandledTransportAction { + + private static final Logger log = LogManager.getLogger(TransportIndexCorrelationRuleAction.class); + + private final Client client; + + private final CorrelationRuleIndices correlationRuleIndices; + + private final ClusterService clusterService; + + @Inject + public TransportIndexCorrelationRuleAction( + TransportService transportService, + Client client, + ActionFilters actionFilters, + ClusterService clusterService, + CorrelationRuleIndices correlationRuleIndices + ) { + super(IndexCorrelationRuleAction.NAME, transportService, actionFilters, IndexCorrelationRuleRequest::new); + this.client = client; + this.clusterService = clusterService; + this.correlationRuleIndices = correlationRuleIndices; + } + + @Override + protected void doExecute(Task task, IndexCorrelationRuleRequest request, ActionListener listener) { + AsyncIndexCorrelationRuleAction asyncAction = new AsyncIndexCorrelationRuleAction(request, listener); + asyncAction.start(); + } + + class AsyncIndexCorrelationRuleAction { + private final IndexCorrelationRuleRequest request; + + private final ActionListener listener; + + AsyncIndexCorrelationRuleAction(IndexCorrelationRuleRequest request, ActionListener listener) { + this.request = request; + this.listener = listener; + } + + void start() { + try { + if (!correlationRuleIndices.correlationRuleIndexExists()) { + try { + correlationRuleIndices.initCorrelationRuleIndex(new ActionListener<>() { + @Override + public void onResponse(CreateIndexResponse response) { + try { + onCreateMappingsResponse(response); + indexCorrelationRule(); + } catch (IOException e) { + onFailures(e); + } + } + + @Override + public void onFailure(Exception e) { + onFailures(e); + } + }); + } catch (IOException e) { + onFailures(e); + } + } else if (!IndexUtils.correlationRuleIndexUpdated) { + IndexUtils.updateIndexMapping( + CorrelationRule.CORRELATION_RULE_INDEX, + CorrelationRuleIndices.correlationRuleIndexMappings(), + clusterService.state(), + client.admin().indices(), + new ActionListener<>() { + @Override + public void onResponse(AcknowledgedResponse response) { + onUpdateMappingsResponse(response); + try { + indexCorrelationRule(); + } catch (IOException e) { + onFailures(e); + } + } + + @Override + public void onFailure(Exception e) { + onFailures(e); + } + } + ); + } else { + indexCorrelationRule(); + } + } catch (IOException ex) { + onFailures(ex); + } + } + + void indexCorrelationRule() throws IOException { + IndexRequest indexRequest; + if (request.getMethod() == RestRequest.Method.POST) { + indexRequest = new IndexRequest(CorrelationRule.CORRELATION_RULE_INDEX).setRefreshPolicy( + WriteRequest.RefreshPolicy.IMMEDIATE + ) + .source(request.getCorrelationRule().toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .timeout(TimeValue.timeValueSeconds(60)); + } else { + indexRequest = new IndexRequest(CorrelationRule.CORRELATION_RULE_INDEX).setRefreshPolicy( + WriteRequest.RefreshPolicy.IMMEDIATE + ) + .source(request.getCorrelationRule().toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .id(request.getCorrelationRuleId()) + .timeout(TimeValue.timeValueSeconds(60)); + } + + client.index(indexRequest, new ActionListener<>() { + @Override + public void onResponse(IndexResponse response) { + if (response.status().equals(RestStatus.CREATED) || response.status().equals(RestStatus.OK)) { + CorrelationRule ruleResponse = request.getCorrelationRule(); + ruleResponse.setId(response.getId()); + onOperation(ruleResponse); + } else { + onFailures(new OpenSearchStatusException(response.toString(), RestStatus.INTERNAL_SERVER_ERROR)); + } + } + + @Override + public void onFailure(Exception e) { + onFailures(e); + } + }); + } + + private void onCreateMappingsResponse(CreateIndexResponse response) throws IOException { + if (response.isAcknowledged()) { + log.info(String.format(Locale.ROOT, "Created %s with mappings.", CorrelationRule.CORRELATION_RULE_INDEX)); + IndexUtils.correlationRuleIndexUpdated(); + } else { + log.error(String.format(Locale.ROOT, "Create %s mappings call not acknowledged.", CorrelationRule.CORRELATION_RULE_INDEX)); + throw new OpenSearchStatusException( + String.format(Locale.getDefault(), "Create %s mappings call not acknowledged", CorrelationRule.CORRELATION_RULE_INDEX), + RestStatus.INTERNAL_SERVER_ERROR + ); + } + } + + private void onUpdateMappingsResponse(AcknowledgedResponse response) { + if (response.isAcknowledged()) { + log.info(String.format(Locale.ROOT, "Created %s with mappings.", CorrelationRule.CORRELATION_RULE_INDEX)); + IndexUtils.correlationRuleIndexUpdated(); + } else { + log.error(String.format(Locale.ROOT, "Create %s mappings call not acknowledged.", CorrelationRule.CORRELATION_RULE_INDEX)); + throw new OpenSearchStatusException( + String.format(Locale.getDefault(), "Create %s mappings call not acknowledged", CorrelationRule.CORRELATION_RULE_INDEX), + RestStatus.INTERNAL_SERVER_ERROR + ); + } + } + + private void onOperation(CorrelationRule correlationRule) { + finishHim(correlationRule, null); + } + + private void onFailures(Exception t) { + finishHim(null, t); + } + + private void finishHim(CorrelationRule correlationRule, Exception t) { + if (t != null) { + listener.onFailure(t); + } else { + listener.onResponse(new IndexCorrelationRuleResponse( + correlationRule.getId(), + correlationRule.getVersion(), + request.getMethod() == RestRequest.Method.POST ? RestStatus.CREATED : RestStatus.OK, + correlationRule + )); + } + } + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java index 50b59d03d..923b77fa5 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexDetectorAction.java @@ -4,14 +4,6 @@ */ package org.opensearch.securityanalytics.transport; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; -import java.util.stream.Collectors; import org.apache.commons.lang3.tuple.Pair; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -19,6 +11,7 @@ import org.opensearch.OpenSearchStatusException; import org.opensearch.action.ActionListener; import org.opensearch.action.ActionRunnable; +import org.opensearch.action.StepListener; import org.opensearch.action.admin.indices.create.CreateIndexResponse; import org.opensearch.action.bulk.BulkResponse; import org.opensearch.action.get.GetRequest; @@ -28,52 +21,76 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.GroupedActionListener; import org.opensearch.action.support.HandledTransportAction; import org.opensearch.action.support.WriteRequest; +import org.opensearch.action.support.WriteRequest.RefreshPolicy; import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.client.Client; import org.opensearch.client.node.NodeClient; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.SetOnce; import org.opensearch.common.inject.Inject; +import org.opensearch.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.commons.alerting.AlertingPluginInterface; +import org.opensearch.commons.alerting.action.DeleteMonitorRequest; +import org.opensearch.commons.alerting.action.DeleteMonitorResponse; import org.opensearch.commons.alerting.action.IndexMonitorRequest; import org.opensearch.commons.alerting.action.IndexMonitorResponse; +import org.opensearch.commons.alerting.model.BucketLevelTrigger; import org.opensearch.commons.alerting.model.DataSources; import org.opensearch.commons.alerting.model.DocLevelMonitorInput; import org.opensearch.commons.alerting.model.DocLevelQuery; import org.opensearch.commons.alerting.model.DocumentLevelTrigger; import org.opensearch.commons.alerting.model.Monitor; +import org.opensearch.commons.alerting.model.Monitor.MonitorType; +import org.opensearch.commons.alerting.model.SearchInput; import org.opensearch.commons.alerting.model.action.Action; +import org.opensearch.commons.authuser.User; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.index.IndexNotFoundException; +import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; +import org.opensearch.index.query.RangeQueryBuilder; import org.opensearch.index.reindex.BulkByScrollResponse; import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.rest.RestRequest; +import org.opensearch.rest.RestRequest.Method; import org.opensearch.rest.RestStatus; import org.opensearch.script.Script; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.securityanalytics.action.GetIndexMappingsAction; +import org.opensearch.securityanalytics.action.GetIndexMappingsRequest; +import org.opensearch.securityanalytics.action.GetIndexMappingsResponse; import org.opensearch.securityanalytics.action.IndexDetectorAction; import org.opensearch.securityanalytics.action.IndexDetectorRequest; import org.opensearch.securityanalytics.action.IndexDetectorResponse; import org.opensearch.securityanalytics.config.monitors.DetectorMonitorConfig; import org.opensearch.securityanalytics.mapper.MapperService; +import org.opensearch.securityanalytics.mapper.MapperUtils; import org.opensearch.securityanalytics.model.Detector; import org.opensearch.securityanalytics.model.DetectorInput; import org.opensearch.securityanalytics.model.DetectorRule; import org.opensearch.securityanalytics.model.DetectorTrigger; import org.opensearch.securityanalytics.model.Rule; import org.opensearch.securityanalytics.model.Value; +import org.opensearch.securityanalytics.rules.aggregation.AggregationItem; +import org.opensearch.securityanalytics.rules.backend.OSQueryBackend; +import org.opensearch.securityanalytics.rules.backend.QueryBackend; +import org.opensearch.securityanalytics.rules.exceptions.SigmaError; import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; import org.opensearch.securityanalytics.util.DetectorIndices; import org.opensearch.securityanalytics.util.IndexUtils; @@ -84,9 +101,25 @@ import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; -public class TransportIndexDetectorAction extends HandledTransportAction { +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; + +public class TransportIndexDetectorAction extends HandledTransportAction implements SecureTransportAction { + public static final String PLUGIN_OWNER_FIELD = "security_analytics"; private static final Logger log = LogManager.getLogger(TransportIndexDetectorAction.class); + public static final String TIMESTAMP_FIELD_ALIAS = "timestamp"; private final Client client; @@ -104,12 +137,29 @@ public class TransportIndexDetectorAction extends HandledTransportAction listener) { - AsyncIndexDetectorsAction asyncAction = new AsyncIndexDetectorsAction(task, request, listener); - asyncAction.start(); + User user = readUserFromThreadContext(this.threadPool); + + String validateBackendRoleMessage = validateUserBackendRoles(user, this.filterByEnabled); + if (!"".equals(validateBackendRoleMessage)) { + listener.onFailure(SecurityAnalyticsException.wrap(new OpenSearchStatusException(validateBackendRoleMessage, RestStatus.FORBIDDEN))); + return; + } + + checkIndicesAndExecute(task, request, listener, user); } - private void createAlertingMonitorFromQueries(Pair>> logIndexToQueries, Detector detector, ActionListener listener, WriteRequest.RefreshPolicy refreshPolicy) { + private void checkIndicesAndExecute( + Task task, + IndexDetectorRequest request, + ActionListener listener, + User user + ) { + String [] detectorIndices = request.getDetector().getInputs().stream().flatMap(detectorInput -> detectorInput.getIndices().stream()).toArray(String[]::new); + SearchRequest searchRequest = new SearchRequest(detectorIndices).source(SearchSourceBuilder.searchSource().size(1).query(QueryBuilders.matchAllQuery()));; + searchRequest.setCancelAfterTimeInterval(TimeValue.timeValueSeconds(30)); + client.search(searchRequest, new ActionListener<>() { + @Override + public void onResponse(SearchResponse searchResponse) { + log.debug("check indices and execute completed. Took {} millis", searchResponse.getTook().millis()); + AsyncIndexDetectorsAction asyncAction = new AsyncIndexDetectorsAction(user, task, request, listener); + asyncAction.start(); + } + + @Override + public void onFailure(Exception e) { + log.debug("check indices and execute failed", e); + if (e instanceof OpenSearchStatusException) { + listener.onFailure(SecurityAnalyticsException.wrap( + new OpenSearchStatusException(String.format(Locale.getDefault(), "User doesn't have read permissions for one or more configured index %s", detectorIndices), RestStatus.FORBIDDEN) + )); + } else if (e instanceof IndexNotFoundException) { + listener.onFailure(SecurityAnalyticsException.wrap( + new OpenSearchStatusException(String.format(Locale.getDefault(), "Indices not found %s", String.join(", ", detectorIndices)), RestStatus.NOT_FOUND) + )); + } + else { + listener.onFailure(SecurityAnalyticsException.wrap(e)); + } + } + }); + } + + private void createMonitorFromQueries(List> rulesById, Detector detector, ActionListener> listener, WriteRequest.RefreshPolicy refreshPolicy, + List queryFieldNames) { + try { + List> docLevelRules = rulesById.stream().filter(it -> !it.getRight().isAggregationRule()).collect( + Collectors.toList()); + List> bucketLevelRules = rulesById.stream().filter(it -> it.getRight().isAggregationRule()).collect( + Collectors.toList()); + + List monitorRequests = new ArrayList<>(); + + if (!docLevelRules.isEmpty()) { + monitorRequests.add(createDocLevelMonitorRequest(docLevelRules, detector, refreshPolicy, Monitor.NO_ID, Method.POST, queryFieldNames)); + } + + if (!bucketLevelRules.isEmpty()) { + StepListener> bucketLevelMonitorRequests = new StepListener<>(); + buildBucketLevelMonitorRequests(bucketLevelRules, detector, refreshPolicy, Monitor.NO_ID, Method.POST, bucketLevelMonitorRequests); + bucketLevelMonitorRequests.whenComplete(indexMonitorRequests -> { + monitorRequests.addAll(indexMonitorRequests); + // Do nothing if detector doesn't have any monitor + if (monitorRequests.isEmpty()) { + listener.onResponse(Collections.emptyList()); + return; + } + + List monitorResponses = new ArrayList<>(); + StepListener addFirstMonitorStep = new StepListener(); + + // Indexing monitors in two steps in order to prevent all shards failed error from alerting + // https://github.com/opensearch-project/alerting/issues/646 + AlertingPluginInterface.INSTANCE.indexMonitor((NodeClient) client, monitorRequests.get(0), namedWriteableRegistry, addFirstMonitorStep); + addFirstMonitorStep.whenComplete(addedFirstMonitorResponse -> { + monitorResponses.add(addedFirstMonitorResponse); + int numberOfUnprocessedResponses = monitorRequests.size() - 1; + if (numberOfUnprocessedResponses == 0) { + listener.onResponse(monitorResponses); + } else { + GroupedActionListener monitorResponseListener = new GroupedActionListener( + new ActionListener>() { + @Override + public void onResponse(Collection indexMonitorResponse) { + monitorResponses.addAll(indexMonitorResponse.stream().collect(Collectors.toList())); + listener.onResponse(monitorResponses); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }, numberOfUnprocessedResponses); + + for (int i = 1; i < monitorRequests.size(); i++) { + AlertingPluginInterface.INSTANCE.indexMonitor((NodeClient) client, monitorRequests.get(i), namedWriteableRegistry, monitorResponseListener); + } + } + }, + listener::onFailure + ); + }, listener::onFailure); + } else { + // Failure if detector doesn't have any monitor + if (monitorRequests.isEmpty()) { + listener.onFailure(new OpenSearchStatusException("Detector cannot be created as no compatible rules were provided", RestStatus.BAD_REQUEST)); + return; + } + + List monitorResponses = new ArrayList<>(); + StepListener addFirstMonitorStep = new StepListener(); + + // Indexing monitors in two steps in order to prevent all shards failed error from alerting + // https://github.com/opensearch-project/alerting/issues/646 + AlertingPluginInterface.INSTANCE.indexMonitor((NodeClient) client, monitorRequests.get(0), namedWriteableRegistry, addFirstMonitorStep); + addFirstMonitorStep.whenComplete(addedFirstMonitorResponse -> { + monitorResponses.add(addedFirstMonitorResponse); + int numberOfUnprocessedResponses = monitorRequests.size() - 1; + if (numberOfUnprocessedResponses == 0) { + listener.onResponse(monitorResponses); + } else { + GroupedActionListener monitorResponseListener = new GroupedActionListener( + new ActionListener>() { + @Override + public void onResponse(Collection indexMonitorResponse) { + monitorResponses.addAll(indexMonitorResponse.stream().collect(Collectors.toList())); + listener.onResponse(monitorResponses); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }, numberOfUnprocessedResponses); + + for (int i = 1; i < monitorRequests.size(); i++) { + AlertingPluginInterface.INSTANCE.indexMonitor((NodeClient) client, monitorRequests.get(i), namedWriteableRegistry, monitorResponseListener); + } + } + }, + listener::onFailure + ); + } + } catch (Exception e) { + listener.onFailure(e); + } + } + + private void updateMonitorFromQueries(List> rulesById, + Detector detector, + ActionListener> listener, + WriteRequest.RefreshPolicy refreshPolicy, + List queryFieldNames) { + List monitorsToBeUpdated = new ArrayList<>(); + + List> bucketLevelRules = rulesById.stream().filter(it -> it.getRight().isAggregationRule()).collect( + Collectors.toList()); + + List monitorsToBeAdded = new ArrayList<>(); + // Process bucket level monitors + if (!bucketLevelRules.isEmpty()) { + try { + List ruleCategories = bucketLevelRules.stream().map(Pair::getRight).map(Rule::getCategory).distinct().collect( + Collectors.toList()); + Map queryBackendMap = new HashMap<>(); + for (String category : ruleCategories) { + queryBackendMap.put(category, new OSQueryBackend(category, true, true)); + } + + // Pair of RuleId - MonitorId for existing monitors of the detector + Map monitorPerRule = detector.getRuleIdMonitorIdMap(); + GroupedActionListener groupedActionListener = new GroupedActionListener<>( + new ActionListener<>() { + @Override + public void onResponse(Collection indexMonitorRequests) { + onIndexMonitorRequestCreation( + monitorsToBeUpdated, + monitorsToBeAdded, + rulesById, + detector, + refreshPolicy, + queryFieldNames, + listener + ); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }, bucketLevelRules.size() + ); + for (Pair query : bucketLevelRules) { + Rule rule = query.getRight(); + if (rule.getAggregationQueries() != null) { + // Detect if the monitor should be added or updated + if (monitorPerRule.containsKey(rule.getId())) { + String monitorId = monitorPerRule.get(rule.getId()); + createBucketLevelMonitorRequest(query.getRight(), + detector, + refreshPolicy, + monitorId, + Method.PUT, + queryBackendMap.get(rule.getCategory()), + new ActionListener<>() { + @Override + public void onResponse(IndexMonitorRequest indexMonitorRequest) { + monitorsToBeUpdated.add(indexMonitorRequest); + groupedActionListener.onResponse(indexMonitorRequest); + } + + @Override + public void onFailure(Exception e) { + log.error("Failed to create bucket level monitor request", e); + listener.onFailure(e); + } + }); + } else { + createBucketLevelMonitorRequest(query.getRight(), + detector, + refreshPolicy, + Monitor.NO_ID, + Method.POST, + queryBackendMap.get(rule.getCategory()), + new ActionListener<>() { + @Override + public void onResponse(IndexMonitorRequest indexMonitorRequest) { + monitorsToBeAdded.add(indexMonitorRequest); + groupedActionListener.onResponse(indexMonitorRequest); + + } + + @Override + public void onFailure(Exception e) { + log.error("Failed to create bucket level monitor request", e); + listener.onFailure(e); + } + }); + } + } + } + + } catch (Exception ex) { + listener.onFailure(ex); + } + } else { + onIndexMonitorRequestCreation( + monitorsToBeUpdated, + monitorsToBeAdded, + rulesById, + detector, + refreshPolicy, + queryFieldNames, + listener + ); + } + } + + private void onIndexMonitorRequestCreation(List monitorsToBeUpdated, + List monitorsToBeAdded, + List> rulesById, + Detector detector, + RefreshPolicy refreshPolicy, + List queryFieldNames, + ActionListener> listener) { + List> docLevelRules = rulesById.stream().filter(it -> !it.getRight().isAggregationRule()).collect( + Collectors.toList()); + + // Process doc level monitors + if (!docLevelRules.isEmpty()) { + if (detector.getDocLevelMonitorId() == null) { + monitorsToBeAdded.add(createDocLevelMonitorRequest(docLevelRules, detector, refreshPolicy, Monitor.NO_ID, Method.POST, queryFieldNames)); + } else { + monitorsToBeUpdated.add(createDocLevelMonitorRequest(docLevelRules, detector, refreshPolicy, detector.getDocLevelMonitorId(), Method.PUT, queryFieldNames)); + } + } + + List monitorIdsToBeDeleted = detector.getRuleIdMonitorIdMap().values().stream().collect(Collectors.toList()); + monitorIdsToBeDeleted.removeAll(monitorsToBeUpdated.stream().map(IndexMonitorRequest::getMonitorId).collect( + Collectors.toList())); + + updateAlertingMonitors(rulesById, detector, monitorsToBeAdded, monitorsToBeUpdated, monitorIdsToBeDeleted, refreshPolicy, listener); + } + + + /** + * Update list of monitors for the given detector + * Executed in a steps: + * 1. Add new monitors; + * 2. Update existing monitors; + * 3. Delete the monitors omitted from request + * 4. Respond with updated list of monitors + * @param monitorsToBeAdded Newly added monitors by the user + * @param monitorsToBeUpdated Existing monitors that will be updated + * @param monitorsToBeDeleted Monitors omitted by the user + * @param refreshPolicy + * @param listener Listener that accepts the list of updated monitors if the action was successful + */ + private void updateAlertingMonitors( + List> rulesById, + Detector detector, + List monitorsToBeAdded, + List monitorsToBeUpdated, + List monitorsToBeDeleted, + RefreshPolicy refreshPolicy, + ActionListener> listener + ) { + List updatedMonitors = new ArrayList<>(); + + // Update monitor steps + StepListener> addNewMonitorsStep = new StepListener(); + executeMonitorActionRequest(monitorsToBeAdded, addNewMonitorsStep); + // 1. Add new alerting monitors (for the rules that didn't exist previously) + addNewMonitorsStep.whenComplete(addNewMonitorsResponse -> { + if (addNewMonitorsResponse != null && !addNewMonitorsResponse.isEmpty()) { + updatedMonitors.addAll(addNewMonitorsResponse); + } + + StepListener> updateMonitorsStep = new StepListener<>(); + executeMonitorActionRequest(monitorsToBeUpdated, updateMonitorsStep); + // 2. Update existing alerting monitors (based on the common rules) + updateMonitorsStep.whenComplete(updateMonitorResponse -> { + if (updateMonitorResponse != null && !updateMonitorResponse.isEmpty()) { + updatedMonitors.addAll(updateMonitorResponse); + } + + StepListener> deleteMonitorStep = new StepListener<>(); + deleteAlertingMonitors(monitorsToBeDeleted, refreshPolicy, deleteMonitorStep); + // 3. Delete alerting monitors (rules that are not provided by the user) + deleteMonitorStep.whenComplete(deleteMonitorResponses -> + // Return list of all updated + newly added monitors + listener.onResponse(updatedMonitors), + // Handle delete monitors (step 3) + listener::onFailure); + }, // Handle update monitor failed (step 2) + listener::onFailure); + // Handle add failed (step 1) + }, listener::onFailure); + } + + private IndexMonitorRequest createDocLevelMonitorRequest(List> queries, Detector detector, RefreshPolicy refreshPolicy, String monitorId, Method restMethod, List queryFieldNames) { List docLevelMonitorInputs = new ArrayList<>(); List docLevelQueries = new ArrayList<>(); - for (Pair query: logIndexToQueries.getRight()) { + for (Pair query: queries) { String id = query.getLeft(); Rule rule = query.getRight(); @@ -151,7 +546,7 @@ private void createAlertingMonitorFromQueries(Pair triggers = new ArrayList<>(); @@ -167,69 +562,264 @@ private void createAlertingMonitorFromQueries(Pair>> logIndexToQueries, Detector detector, ActionListener listener, WriteRequest.RefreshPolicy refreshPolicy) { - List docLevelMonitorInputs = new ArrayList<>(); + private void buildBucketLevelMonitorRequests(List> queries, Detector detector, WriteRequest.RefreshPolicy refreshPolicy, String monitorId, RestRequest.Method restMethod, ActionListener> listener) throws Exception { + log.debug("bucket level monitor request starting"); + log.debug("get rule field mappings request being made"); - List docLevelQueries = new ArrayList<>(); + log.debug("got rule field mapping success"); + List ruleCategories = queries.stream().map(Pair::getRight).map(Rule::getCategory).distinct().collect( + Collectors.toList()); + Map queryBackendMap = new HashMap<>(); + for (String category : ruleCategories) { + try { + queryBackendMap.put(category, new OSQueryBackend(category, true, true)); + } catch (IOException e) { + logger.error("Failed to create OSQueryBackend from field mappings", e); + listener.onFailure(e); + } + } - for (Pair query: logIndexToQueries.getRight()) { - String id = query.getLeft(); + List monitorRequests = new ArrayList<>(); + GroupedActionListener bucketLevelMonitorRequestsListener = new GroupedActionListener<>( + new ActionListener<>() { + @Override + public void onResponse(Collection indexMonitorRequests) { + listener.onResponse(monitorRequests); + } + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }, queries.size() + ); + for (Pair query : queries) { Rule rule = query.getRight(); - String name = query.getLeft(); - String actualQuery = rule.getQueries().get(0).getValue(); + // Creating bucket level monitor per each aggregation rule + if (rule.getAggregationQueries() != null) { + createBucketLevelMonitorRequest( + query.getRight(), + detector, + refreshPolicy, + monitorId, + restMethod, + queryBackendMap.get(rule.getCategory()), + new ActionListener<>() { + @Override + public void onResponse(IndexMonitorRequest indexMonitorRequest) { + monitorRequests.add(indexMonitorRequest); + bucketLevelMonitorRequestsListener.onResponse(indexMonitorRequest); + } - List tags = new ArrayList<>(); - tags.add(rule.getLevel()); - tags.add(rule.getCategory()); - tags.addAll(rule.getTags().stream().map(Value::getValue).collect(Collectors.toList())); - DocLevelQuery docLevelQuery = new DocLevelQuery(id, name, actualQuery, tags); - docLevelQueries.add(docLevelQuery); + @Override + public void onFailure(Exception e) { + logger.error("Failed to build bucket level monitor requests", e); + bucketLevelMonitorRequestsListener.onFailure(e); + } + }); + + } else { + log.debug("Aggregation query is null in rule {}", rule.getId()); + bucketLevelMonitorRequestsListener.onResponse(null); + } } - DocLevelMonitorInput docLevelMonitorInput = new DocLevelMonitorInput(detector.getName(), List.of(logIndexToQueries.getKey()), docLevelQueries); - docLevelMonitorInputs.add(docLevelMonitorInput); + } - List triggers = new ArrayList<>(); - List detectorTriggers = detector.getTriggers(); + private void createBucketLevelMonitorRequest( + Rule rule, + Detector detector, + WriteRequest.RefreshPolicy refreshPolicy, + String monitorId, + RestRequest.Method restMethod, + QueryBackend queryBackend, + ActionListener listener + ) { + log.debug(":create bucket level monitor response starting"); + List indices = detector.getInputs().get(0).getIndices(); + try { + AggregationItem aggItem = rule.getAggregationItemsFromRule().get(0); + OSQueryBackend.AggregationQueries aggregationQueries = queryBackend.convertAggregation(aggItem); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() + .seqNoAndPrimaryTerm(true) + .version(true) + // Build query string filter + .query(QueryBuilders.queryStringQuery(rule.getQueries().get(0).getValue())) + .aggregation(aggregationQueries.getAggBuilder()); + // input index can also be an index pattern or alias so we have to resolve it to concrete index + String concreteIndex = IndexUtils.getNewIndexByCreationDate( + clusterService.state(), + indexNameExpressionResolver, + indices.get(0) // taking first one is fine because we expect that all indices in list share same mappings + ); + client.execute( + GetIndexMappingsAction.INSTANCE, + new GetIndexMappingsRequest(concreteIndex), + new ActionListener<>() { + @Override + public void onResponse(GetIndexMappingsResponse getIndexMappingsResponse) { + MappingMetadata mappingMetadata = getIndexMappingsResponse.mappings().get(concreteIndex); + List> pairs = null; + try { + pairs = MapperUtils.getAllAliasPathPairs(mappingMetadata); + } catch (IOException e) { + logger.debug("Failed to get alias path pairs from mapping metadata", e); + onFailure(e); + } + boolean timeStampAliasPresent = pairs. + stream() + .anyMatch(p -> + TIMESTAMP_FIELD_ALIAS.equals(p.getLeft()) || TIMESTAMP_FIELD_ALIAS.equals(p.getRight())); + if (timeStampAliasPresent) { + BoolQueryBuilder boolQueryBuilder = searchSourceBuilder.query() == null + ? new BoolQueryBuilder() + : QueryBuilders.boolQuery().must(searchSourceBuilder.query()); + RangeQueryBuilder timeRangeFilter = QueryBuilders.rangeQuery(TIMESTAMP_FIELD_ALIAS) + .gt("{{period_end}}||-1h") + .lte("{{period_end}}") + .format("epoch_millis"); + boolQueryBuilder.must(timeRangeFilter); + searchSourceBuilder.query(boolQueryBuilder); + } + List bucketLevelMonitorInputs = new ArrayList<>(); + bucketLevelMonitorInputs.add(new SearchInput(indices, searchSourceBuilder)); + + List triggers = new ArrayList<>(); + BucketLevelTrigger bucketLevelTrigger = new BucketLevelTrigger(rule.getId(), rule.getTitle(), rule.getLevel(), aggregationQueries.getCondition(), + Collections.emptyList()); + triggers.add(bucketLevelTrigger); + + /** TODO - Think how to use detector trigger + List detectorTriggers = detector.getTriggers(); + for (DetectorTrigger detectorTrigger: detectorTriggers) { + String id = detectorTrigger.getId(); + String name = detectorTrigger.getName(); + String severity = detectorTrigger.getSeverity(); + List actions = detectorTrigger.getActions(); + Script condition = detectorTrigger.convertToCondition(); + + BucketLevelTrigger bucketLevelTrigger1 = new BucketLevelTrigger(id, name, severity, condition, actions); + triggers.add(bucketLevelTrigger1); + } **/ + + Monitor monitor = new Monitor(monitorId, Monitor.NO_VERSION, detector.getName(), false, detector.getSchedule(), detector.getLastUpdateTime(), null, + MonitorType.BUCKET_LEVEL_MONITOR, detector.getUser(), 1, bucketLevelMonitorInputs, triggers, Map.of(), + new DataSources(detector.getRuleIndex(), + detector.getFindingsIndex(), + detector.getFindingsIndexPattern(), + detector.getAlertsIndex(), + detector.getAlertsHistoryIndex(), + detector.getAlertsHistoryIndexPattern(), + DetectorMonitorConfig.getRuleIndexMappingsByType(), + true), PLUGIN_OWNER_FIELD); + + listener.onResponse(new IndexMonitorRequest(monitorId, SequenceNumbers.UNASSIGNED_SEQ_NO, SequenceNumbers.UNASSIGNED_PRIMARY_TERM, refreshPolicy, restMethod, monitor, null)); + } - for (DetectorTrigger detectorTrigger: detectorTriggers) { - String id = detectorTrigger.getId(); - String name = detectorTrigger.getName(); - String severity = detectorTrigger.getSeverity(); - List actions = detectorTrigger.getActions(); - Script condition = detectorTrigger.convertToCondition(); + @Override + public void onFailure(Exception e) { + log.error( + String.format(Locale.getDefault(), + "Unable to verify presence of timestamp alias for index [%s] in detector [%s]. Not setting time range filter for bucket level monitor.", + concreteIndex, detector.getName()), e); + listener.onFailure(e); + } + }); + } catch (SigmaError e) { + log.error("Failed to create bucket level monitor request", e); + listener.onFailure(e); + } + } - triggers.add(new DocumentLevelTrigger(id, name, severity, actions, condition)); + /** + * Executes monitor related requests (PUT/POST) - returns the response once all the executions are completed + * @param indexMonitors Monitors to be updated/added + * @param listener actionListener for handling updating/creating monitors + */ + public void executeMonitorActionRequest( + List indexMonitors, + ActionListener> listener) { + + // In the case of not provided monitors, just return empty list + if (indexMonitors == null || indexMonitors.isEmpty()) { + listener.onResponse(new ArrayList<>()); + return; + } + + GroupedActionListener monitorResponseListener = new GroupedActionListener( + new ActionListener>() { + @Override + public void onResponse(Collection indexMonitorResponse) { + listener.onResponse(indexMonitorResponse.stream().collect(Collectors.toList())); + } + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }, indexMonitors.size()); + + // Persist monitors sequentially + for (IndexMonitorRequest req: indexMonitors) { + AlertingPluginInterface.INSTANCE.indexMonitor((NodeClient) client, req, namedWriteableRegistry, monitorResponseListener); + } + } + + /** + * Deletes the alerting monitors based on the given ids and notifies the listener that will be notified once all monitors have been deleted + * @param monitorIds monitor ids to be deleted + * @param refreshPolicy + * @param listener listener that will be notified once all the monitors are being deleted + */ + private void deleteAlertingMonitors(List monitorIds, WriteRequest.RefreshPolicy refreshPolicy, ActionListener> listener){ + if (monitorIds == null || monitorIds.isEmpty()) { + listener.onResponse(new ArrayList<>()); + return; } + ActionListener deletesListener = new GroupedActionListener<>(new ActionListener<>() { + @Override + public void onResponse(Collection responses) { + SetOnce errorStatusSupplier = new SetOnce<>(); + if (responses.stream().filter(response -> { + if (response.getStatus() != RestStatus.OK) { + log.error("Monitor [{}] could not be deleted. Status [{}]", response.getId(), response.getStatus()); + errorStatusSupplier.trySet(response.getStatus()); + return true; + } + return false; + }).count() > 0) { + listener.onFailure(new OpenSearchStatusException("Monitor associated with detected could not be deleted", errorStatusSupplier.get())); + } + listener.onResponse(responses.stream().collect(Collectors.toList())); + } + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }, monitorIds.size()); - Monitor monitor = new Monitor(detector.getMonitorIds().get(0), Monitor.NO_VERSION, detector.getName(), detector.getEnabled(), detector.getSchedule(), detector.getLastUpdateTime(), detector.getEnabledTime(), - Monitor.MonitorType.DOC_LEVEL_MONITOR, detector.getUser(), 1, docLevelMonitorInputs, triggers, Map.of(), - new DataSources(detector.getRuleIndex(), - detector.getFindingsIndex(), - detector.getFindingsIndexPattern(), - detector.getAlertsIndex(), - detector.getAlertsHistoryIndex(), - detector.getAlertsHistoryIndexPattern(), - DetectorMonitorConfig.getRuleIndexMappingsByType(detector.getDetectorType()))); - - IndexMonitorRequest indexMonitorRequest = new IndexMonitorRequest(detector.getMonitorIds().get(0), SequenceNumbers.UNASSIGNED_SEQ_NO, SequenceNumbers.UNASSIGNED_PRIMARY_TERM, refreshPolicy, RestRequest.Method.PUT, monitor); - AlertingPluginInterface.INSTANCE.indexMonitor((NodeClient) client, indexMonitorRequest, listener); + for (String monitorId : monitorIds) { + deleteAlertingMonitor(monitorId, refreshPolicy, deletesListener); + } + } + private void deleteAlertingMonitor(String monitorId, WriteRequest.RefreshPolicy refreshPolicy, ActionListener listener) { + DeleteMonitorRequest request = new DeleteMonitorRequest(monitorId, refreshPolicy); + AlertingPluginInterface.INSTANCE.deleteMonitor((NodeClient) client, request, listener); } private void onCreateMappingsResponse(CreateIndexResponse response) throws IOException { @@ -259,17 +849,21 @@ class AsyncIndexDetectorsAction { private final AtomicReference response; private final AtomicBoolean counter = new AtomicBoolean(); private final Task task; + private final User user; - AsyncIndexDetectorsAction(Task task, IndexDetectorRequest request, ActionListener listener) { + AsyncIndexDetectorsAction(User user, Task task, IndexDetectorRequest request, ActionListener listener) { this.task = task; this.request = request; this.listener = listener; + this.user = user; this.response = new AtomicReference<>(); } void start() { try { + TransportIndexDetectorAction.this.threadPool.getThreadContext().stashContext(); + if (!detectorIndices.detectorIndexExists()) { detectorIndices.initDetectorIndex(new ActionListener<>() { @Override @@ -326,7 +920,6 @@ void prepareDetectorIndexing() throws IOException { void createDetector() { Detector detector = request.getDetector(); - String ruleTopic = detector.getDetectorType(); request.getDetector().setAlertsIndex(DetectorMonitorConfig.getAlertsIndex(ruleTopic)); @@ -336,16 +929,22 @@ void createDetector() { request.getDetector().setFindingsIndexPattern(DetectorMonitorConfig.getFindingsIndexPattern(ruleTopic)); request.getDetector().setRuleIndex(DetectorMonitorConfig.getRuleIndex(ruleTopic)); + User originalContextUser = this.user; + log.debug("user from original context is {}", originalContextUser); + request.getDetector().setUser(originalContextUser); + + if (!detector.getInputs().isEmpty()) { try { - ruleTopicIndices.initRuleTopicIndex(detector.getRuleIndex(), new ActionListener<>() { + ruleTopicIndices.initRuleTopicIndexTemplate(new ActionListener<>() { @Override - public void onResponse(CreateIndexResponse createIndexResponse) { + public void onResponse(AcknowledgedResponse acknowledgedResponse) { initRuleIndexAndImportRules(request, new ActionListener<>() { @Override - public void onResponse(IndexMonitorResponse indexMonitorResponse) { - request.getDetector().setMonitorIds(List.of(indexMonitorResponse.getId())); + public void onResponse(List monitorResponses) { + request.getDetector().setMonitorIds(getMonitorIds(monitorResponses)); + request.getDetector().setRuleIdMonitorIdMap(mapMonitorIds(monitorResponses)); try { indexDetector(); } catch (IOException e) { @@ -374,6 +973,9 @@ public void onFailure(Exception e) { void updateDetector() { String id = request.getDetectorId(); + User originalContextUser = this.user; + log.debug("user from original context is {}", originalContextUser); + GetRequest request = new GetRequest(Detector.DETECTORS_INDEX, id); client.get(request, new ActionListener<>() { @Override @@ -390,7 +992,21 @@ public void onResponse(GetResponse response) { ); Detector detector = Detector.docParse(xcp, response.getId(), response.getVersion()); - onGetResponse(detector); + + // security is enabled and filterby is enabled + if (!checkUserPermissionsWithResource( + originalContextUser, + detector.getUser(), + "detector", + detector.getId(), + TransportIndexDetectorAction.this.filterByEnabled + ) + + ) { + onFailure(SecurityAnalyticsException.wrap(new OpenSearchStatusException("Do not have permissions to resource", RestStatus.FORBIDDEN))); + return; + } + onGetResponse(detector, detector.getUser()); } catch (IOException e) { onFailures(e); } @@ -403,31 +1019,37 @@ public void onFailure(Exception e) { }); } - void onGetResponse(Detector currentDetector) { + void onGetResponse(Detector currentDetector, User user) { if (request.getDetector().getEnabled() && currentDetector.getEnabled()) { request.getDetector().setEnabledTime(currentDetector.getEnabledTime()); } request.getDetector().setMonitorIds(currentDetector.getMonitorIds()); + request.getDetector().setRuleIdMonitorIdMap(currentDetector.getRuleIdMonitorIdMap()); Detector detector = request.getDetector(); String ruleTopic = detector.getDetectorType(); + log.debug("user in update detector {}", user); + + request.getDetector().setAlertsIndex(DetectorMonitorConfig.getAlertsIndex(ruleTopic)); request.getDetector().setAlertsHistoryIndex(DetectorMonitorConfig.getAlertsHistoryIndex(ruleTopic)); request.getDetector().setAlertsHistoryIndexPattern(DetectorMonitorConfig.getAlertsHistoryIndexPattern(ruleTopic)); request.getDetector().setFindingsIndex(DetectorMonitorConfig.getFindingsIndex(ruleTopic)); request.getDetector().setFindingsIndexPattern(DetectorMonitorConfig.getFindingsIndexPattern(ruleTopic)); request.getDetector().setRuleIndex(DetectorMonitorConfig.getRuleIndex(ruleTopic)); + request.getDetector().setUser(user); if (!detector.getInputs().isEmpty()) { try { - ruleTopicIndices.initRuleTopicIndex(detector.getRuleIndex(), new ActionListener<>() { + ruleTopicIndices.initRuleTopicIndexTemplate(new ActionListener<>() { @Override - public void onResponse(CreateIndexResponse createIndexResponse) { + public void onResponse(AcknowledgedResponse acknowledgedResponse) { initRuleIndexAndImportRules(request, new ActionListener<>() { @Override - public void onResponse(IndexMonitorResponse indexMonitorResponse) { - request.getDetector().setMonitorIds(List.of(indexMonitorResponse.getId())); + public void onResponse(List monitorResponses) { + request.getDetector().setMonitorIds(getMonitorIds(monitorResponses)); + request.getDetector().setRuleIdMonitorIdMap(mapMonitorIds(monitorResponses)); try { indexDetector(); } catch (IOException e) { @@ -453,13 +1075,13 @@ public void onFailure(Exception e) { } } - public void initRuleIndexAndImportRules(IndexDetectorRequest request, ActionListener listener) { + public void initRuleIndexAndImportRules(IndexDetectorRequest request, ActionListener> listener) { ruleIndices.initPrepackagedRulesIndex( new ActionListener<>() { @Override public void onResponse(CreateIndexResponse response) { ruleIndices.onCreateMappingsResponse(response, true); - ruleIndices.importRules(WriteRequest.RefreshPolicy.IMMEDIATE, indexTimeout, + ruleIndices.importRules(RefreshPolicy.IMMEDIATE, indexTimeout, new ActionListener<>() { @Override public void onResponse(BulkResponse response) { @@ -558,7 +1180,7 @@ public void onFailure(Exception e) { } @SuppressWarnings("unchecked") - public void importRules(IndexDetectorRequest request, ActionListener listener) { + public void importRules(IndexDetectorRequest request, ActionListener> listener) { final Detector detector = request.getDetector(); final String ruleTopic = detector.getDetectorType(); final DetectorInput detectorInput = detector.getInputs().get(0); @@ -609,17 +1231,11 @@ public void onResponse(SearchResponse response) { if (ruleIndices.ruleIndexExists(false)) { importCustomRules(detector, detectorInput, queries, listener); } else if (detectorInput.getCustomRules().size() > 0) { - onFailures(new OpenSearchStatusException("Custom Rule Index not found", RestStatus.BAD_REQUEST)); + onFailures(new OpenSearchStatusException("Custom Rule Index not found", RestStatus.NOT_FOUND)); } else { - Pair>> logIndexToQueries = Pair.of(logIndex, queries); - - if (request.getMethod() == RestRequest.Method.POST) { - createAlertingMonitorFromQueries(logIndexToQueries, detector, listener, request.getRefreshPolicy()); - } else if (request.getMethod() == RestRequest.Method.PUT) { - updateAlertingMonitorFromQueries(logIndexToQueries, detector, listener, request.getRefreshPolicy()); - } + resolveRuleFieldNamesAndUpsertMonitorFromQueries(queries, detector, logIndex, listener); } - } catch (IOException e) { + } catch (Exception e) { onFailures(e); } } @@ -632,7 +1248,7 @@ public void onFailure(Exception e) { } @SuppressWarnings("unchecked") - public void importCustomRules(Detector detector, DetectorInput detectorInput, List> queries, ActionListener listener) { + public void importCustomRules(Detector detector, DetectorInput detectorInput, List> queries, ActionListener> listener) { final String logIndex = detectorInput.getIndices().get(0); List ruleIds = detectorInput.getCustomRules().stream().map(DetectorRule::getId).collect(Collectors.toList()); @@ -665,22 +1281,65 @@ public void onResponse(SearchResponse response) { queries.add(Pair.of(id, rule)); } + resolveRuleFieldNamesAndUpsertMonitorFromQueries(queries, detector, logIndex, listener); + } catch (Exception ex) { + onFailures(ex); + } + } + + @Override + public void onFailure(Exception e) { + onFailures(e); + } + }); + } - Pair>> logIndexToQueries = Pair.of(logIndex, queries); + private void resolveRuleFieldNamesAndUpsertMonitorFromQueries(List> queries, Detector detector, String logIndex, ActionListener> listener) { + logger.error("PERF_DEBUG_SAP: Fetching alias path pairs to construct rule_field_names"); + long start = System.currentTimeMillis(); + Set ruleFieldNames = new HashSet<>(); + for (Pair query : queries) { + List queryFieldNames = query.getValue().getQueryFieldNames().stream().map(Value::getValue).collect(Collectors.toList()); + ruleFieldNames.addAll(queryFieldNames); + } + client.execute(GetIndexMappingsAction.INSTANCE, new GetIndexMappingsRequest(logIndex), new ActionListener<>() { + @Override + public void onResponse(GetIndexMappingsResponse getMappingsViewResponse) { + try { + List> aliasPathPairs; - if (request.getMethod() == RestRequest.Method.POST) { - createAlertingMonitorFromQueries(logIndexToQueries, detector, listener, request.getRefreshPolicy()); - } else if (request.getMethod() == RestRequest.Method.PUT) { - updateAlertingMonitorFromQueries(logIndexToQueries, detector, listener, request.getRefreshPolicy()); + aliasPathPairs = MapperUtils.getAllAliasPathPairs(getMappingsViewResponse.getMappings().get(logIndex)); + for (Pair aliasPathPair : aliasPathPairs) { + if (ruleFieldNames.contains(aliasPathPair.getLeft())) { + ruleFieldNames.remove(aliasPathPair.getLeft()); + ruleFieldNames.add(aliasPathPair.getRight()); + } } - } catch (IOException ex) { - onFailures(ex); + long took = System.currentTimeMillis() - start; + log.debug("completed collecting rule_field_names in {} millis", took); + + } catch (Exception e) { + logger.error("Failure in parsing rule field names/aliases while " + + detector.getId() == null ? "creating" : "updating" + + " detector. Not optimizing detector queries with relevant fields", e); + ruleFieldNames.clear(); + } + upsertMonitorQueries(queries, detector, listener, ruleFieldNames, logIndex); + + } + + private void upsertMonitorQueries(List> queries, Detector detector, ActionListener> listener, Set ruleFieldNames, String logIndex) { + if (request.getMethod() == Method.POST) { + createMonitorFromQueries(queries, detector, listener, request.getRefreshPolicy(), new ArrayList<>(ruleFieldNames)); + } else if (request.getMethod() == Method.PUT) { + updateMonitorFromQueries(queries, detector, listener, request.getRefreshPolicy(), new ArrayList<>(ruleFieldNames)); } } @Override public void onFailure(Exception e) { - onFailures(e); + log.error("Failed to fetch mappings view response for log index " + logIndex, e); + listener.onFailure(e); } }); } @@ -731,11 +1390,46 @@ private void onFailures(Exception t) { private void finishHim(Detector detector, Exception t) { threadPool.executor(ThreadPool.Names.GENERIC).execute(ActionRunnable.supply(listener, () -> { if (t != null) { + if (t instanceof OpenSearchStatusException) { + throw t; + } throw SecurityAnalyticsException.wrap(t); } else { return new IndexDetectorResponse(detector.getId(), detector.getVersion(), request.getMethod() == RestRequest.Method.POST? RestStatus.CREATED: RestStatus.OK, detector); } })); } + + private List getMonitorIds(List monitorResponses) { + return monitorResponses.stream().map(IndexMonitorResponse::getId).collect( + Collectors.toList()); + } + + /** + * Creates a map of monitor ids. In the case of bucket level monitors pairs are: RuleId - MonitorId + * In the case of doc level monitor pair is DOC_LEVEL_MONITOR(value) - MonitorId + * @param monitorResponses index monitor responses + * @return map of monitor ids + */ + private Map mapMonitorIds(List monitorResponses) { + return monitorResponses.stream().collect( + Collectors.toMap( + // In the case of bucket level monitors rule id is trigger id + it -> { + if (MonitorType.BUCKET_LEVEL_MONITOR == it.getMonitor().getMonitorType()) { + return it.getMonitor().getTriggers().get(0).getId(); + } else { + return Detector.DOC_LEVEL_MONITOR; + } + }, + IndexMonitorResponse::getId + ) + ); + } } + + private void setFilterByEnabled(boolean filterByEnabled) { + this.filterByEnabled = filterByEnabled; + } + } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexRuleAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexRuleAction.java index e7fc3dfb2..818f3f6a4 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexRuleAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportIndexRuleAction.java @@ -4,6 +4,7 @@ */ package org.opensearch.securityanalytics.transport; +import java.util.Set; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.search.join.ScoreMode; @@ -24,11 +25,11 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.ToXContent; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.rest.RestRequest; @@ -128,6 +129,7 @@ class AsyncIndexRulesAction { } void start() { + TransportIndexRuleAction.this.threadPool.getThreadContext().stashContext(); try { if (!ruleIndices.ruleIndexExists(false)) { ruleIndices.initRuleIndex(new ActionListener<>() { @@ -169,7 +171,7 @@ public void onFailure(Exception e) { void prepareRuleIndexing() { String rule = request.getRule(); - String category = request.getLogType(); + String category = request.getLogType().toLowerCase(Locale.ROOT); try { SigmaRule parsedRule = SigmaRule.fromYaml(rule, true); @@ -180,8 +182,13 @@ void prepareRuleIndexing() { final QueryBackend backend = new OSQueryBackend(category, true, true); List queries = backend.convertRule(parsedRule); - - Rule ruleDoc = new Rule(NO_ID, NO_VERSION, parsedRule, category, queries.stream().map(Object::toString).collect(Collectors.toList()), rule); + Set queryFieldNames = backend.getQueryFields().keySet(); + Rule ruleDoc = new Rule( + NO_ID, NO_VERSION, parsedRule, category, + queries, + new ArrayList<>(queryFieldNames), + rule + ); indexRule(ruleDoc); } catch (IOException | SigmaError e) { onFailures(e); diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportListCorrelationAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportListCorrelationAction.java new file mode 100644 index 000000000..09a488175 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportListCorrelationAction.java @@ -0,0 +1,172 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.transport; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.OpenSearchStatusException; +import org.opensearch.action.ActionListener; +import org.opensearch.action.ActionRunnable; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.HandledTransportAction; +import org.opensearch.client.Client; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.inject.Inject; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.rest.RestStatus; +import org.opensearch.search.SearchHit; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.securityanalytics.action.ListCorrelationsAction; +import org.opensearch.securityanalytics.action.ListCorrelationsRequest; +import org.opensearch.securityanalytics.action.ListCorrelationsResponse; +import org.opensearch.securityanalytics.model.CorrelatedFinding; +import org.opensearch.securityanalytics.util.CorrelationIndices; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; +import org.opensearch.tasks.Task; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; + +public class TransportListCorrelationAction extends HandledTransportAction implements SecureTransportAction { + + private static final Logger log = LogManager.getLogger(TransportListCorrelationAction.class); + + private final ClusterService clusterService; + + private final Settings settings; + + private final Client client; + + private final NamedXContentRegistry xContentRegistry; + + private final ThreadPool threadPool; + + @Inject + public TransportListCorrelationAction(TransportService transportService, + Client client, + NamedXContentRegistry xContentRegistry, + ClusterService clusterService, + Settings settings, + ActionFilters actionFilters) { + super(ListCorrelationsAction.NAME, transportService, actionFilters, ListCorrelationsRequest::new); + this.client = client; + this.xContentRegistry = xContentRegistry; + this.clusterService = clusterService; + this.settings = settings; + this.threadPool = this.client.threadPool(); + } + + @Override + protected void doExecute(Task task, ListCorrelationsRequest request, ActionListener actionListener) { + AsyncListCorrelationAction asyncAction = new AsyncListCorrelationAction(task, request, actionListener); + asyncAction.start(); + } + + class AsyncListCorrelationAction { + private ListCorrelationsRequest request; + private ActionListener listener; + + private final AtomicReference response; + private final AtomicBoolean counter = new AtomicBoolean(); + private final Task task; + + AsyncListCorrelationAction(Task task, ListCorrelationsRequest request, ActionListener listener) { + this.task = task; + this.request = request; + this.listener = listener; + + this.response =new AtomicReference<>(); + } + + @SuppressWarnings("unchecked") + void start() { + Long startTimestamp = request.getStartTimestamp(); + Long endTimestamp = request.getEndTimestamp(); + + BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery() + .mustNot(QueryBuilders.matchQuery( + "finding1", "" + )).mustNot(QueryBuilders.matchQuery( + "finding2", "" + )).filter(QueryBuilders.rangeQuery("timestamp") + .gte(startTimestamp) + .lte(endTimestamp)); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.query(queryBuilder); + searchSourceBuilder.fetchSource(true); + searchSourceBuilder.size(10000); + SearchRequest searchRequest = new SearchRequest(); + searchRequest.indices(CorrelationIndices.CORRELATION_INDEX); + searchRequest.source(searchSourceBuilder); + + client.search(searchRequest, new ActionListener<>() { + @Override + public void onResponse(SearchResponse response) { + if (response.isTimedOut()) { + onFailures(new OpenSearchStatusException(response.toString(), RestStatus.REQUEST_TIMEOUT)); + } + + List correlatedFindings = new ArrayList<>(); + Iterator hits = response.getHits().iterator(); + while (hits.hasNext()) { + SearchHit hit = hits.next(); + Map source = hit.getSourceAsMap(); + + CorrelatedFinding correlatedFinding = new CorrelatedFinding( + source.get("finding1").toString(), + source.get("logType").toString().split("-")[0], + source.get("finding2").toString(), + source.get("logType").toString().split("-")[1], + (List) source.get("corrRules")); + correlatedFindings.add(correlatedFinding); + } + onOperation(new ListCorrelationsResponse(correlatedFindings)); + } + + @Override + public void onFailure(Exception e) { + onFailures(e); + } + }); + } + + private void onOperation(ListCorrelationsResponse response) { + this.response.set(response); + if (counter.compareAndSet(false, true)) { + finishHim(response, null); + } + } + + private void onFailures(Exception t) { + if (counter.compareAndSet(false, true)) { + finishHim(null, t); + } + } + + private void finishHim(ListCorrelationsResponse response, Exception t) { + threadPool.executor(ThreadPool.Names.GENERIC).execute(ActionRunnable.supply(listener, () -> { + if (t != null) { + if (t instanceof OpenSearchStatusException) { + throw t; + } + throw SecurityAnalyticsException.wrap(t); + } else { + return response; + } + })); + } + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportSearchCorrelationAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportSearchCorrelationAction.java new file mode 100644 index 000000000..dde82e31f --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportSearchCorrelationAction.java @@ -0,0 +1,303 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.transport; + +import org.apache.commons.lang3.tuple.Pair; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.OpenSearchStatusException; +import org.opensearch.action.ActionListener; +import org.opensearch.action.ActionRunnable; +import org.opensearch.action.search.MultiSearchRequest; +import org.opensearch.action.search.MultiSearchResponse; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.HandledTransportAction; +import org.opensearch.client.Client; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.inject.Inject; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.MatchQueryBuilder; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.search.SearchHit; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.securityanalytics.action.CorrelatedFindingAction; +import org.opensearch.securityanalytics.action.CorrelatedFindingRequest; +import org.opensearch.securityanalytics.action.CorrelatedFindingResponse; +import org.opensearch.securityanalytics.config.monitors.DetectorMonitorConfig; +import org.opensearch.securityanalytics.correlation.index.query.CorrelationQueryBuilder; +import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.model.FindingWithScore; +import org.opensearch.securityanalytics.util.CorrelationIndices; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; +import org.opensearch.tasks.Task; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; + +public class TransportSearchCorrelationAction extends HandledTransportAction implements SecureTransportAction { + + private static final Logger log = LogManager.getLogger(TransportSearchCorrelationAction.class); + + private final ClusterService clusterService; + + private final Settings settings; + + private final Client client; + + private final NamedXContentRegistry xContentRegistry; + + private final ThreadPool threadPool; + + @Inject + public TransportSearchCorrelationAction(TransportService transportService, + Client client, + NamedXContentRegistry xContentRegistry, + ClusterService clusterService, + Settings settings, + ActionFilters actionFilters) { + super(CorrelatedFindingAction.NAME, transportService, actionFilters, CorrelatedFindingRequest::new); + this.client = client; + this.xContentRegistry = xContentRegistry; + this.clusterService = clusterService; + this.settings = settings; + this.threadPool = this.client.threadPool(); + } + + @Override + protected void doExecute(Task task, CorrelatedFindingRequest request, ActionListener actionListener) { + AsyncSearchCorrelationAction searchCorrelationAction = new AsyncSearchCorrelationAction(task, request, actionListener); + searchCorrelationAction.start(); + } + + class AsyncSearchCorrelationAction { + private CorrelatedFindingRequest request; + private ActionListener listener; + + private final AtomicReference response; + private final AtomicBoolean counter = new AtomicBoolean(); + private final Task task; + + AsyncSearchCorrelationAction(Task task, CorrelatedFindingRequest request, ActionListener listener) { + this.task = task; + this.request = request; + this.listener = listener; + + this.response =new AtomicReference<>(); + } + + @SuppressWarnings("unchecked") + void start() { + String findingId = request.getFindingId(); + Detector.DetectorType detectorType = request.getDetectorType(); + long timeWindow = request.getTimeWindow(); + int noOfNearbyFindings = request.getNoOfNearbyFindings(); + + MatchQueryBuilder queryBuilder = QueryBuilders.matchQuery( + "_id", findingId + ); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.query(queryBuilder); + searchSourceBuilder.fetchSource(false); + searchSourceBuilder.fetchField("timestamp"); + searchSourceBuilder.size(1); + SearchRequest searchRequest = new SearchRequest(); + searchRequest.indices(DetectorMonitorConfig.getAllFindingsIndicesPattern(detectorType.getDetectorType())); + searchRequest.source(searchSourceBuilder); + + client.search(searchRequest, new ActionListener<>() { + @Override + public void onResponse(SearchResponse response) { + SearchHit hit = response.getHits().getAt(0); + long findingTimestamp = hit.getFields().get("timestamp").getValue(); + + BoolQueryBuilder scoreQueryBuilder = QueryBuilders.boolQuery() + .mustNot(QueryBuilders.termQuery("scoreTimestamp", 0L)); + SearchSourceBuilder scoreSearchSourceBuilder = new SearchSourceBuilder(); + scoreSearchSourceBuilder.query(scoreQueryBuilder); + scoreSearchSourceBuilder.fetchSource(true); + scoreSearchSourceBuilder.size(1); + SearchRequest scoreSearchRequest = new SearchRequest(); + scoreSearchRequest.indices(CorrelationIndices.CORRELATION_INDEX); + scoreSearchRequest.source(scoreSearchSourceBuilder); + + client.search(scoreSearchRequest, new ActionListener<>() { + @Override + public void onResponse(SearchResponse response) { + Map hitSource = response.getHits().getHits()[0].getSourceAsMap(); + long scoreTimestamp = (long) hitSource.get("scoreTimestamp"); + + BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery() + .must(QueryBuilders.matchQuery( + "finding1", findingId + )).must(QueryBuilders.matchQuery( + "finding2", "" + )); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.query(queryBuilder); + searchSourceBuilder.fetchSource(false); + searchSourceBuilder.fetchField("counter"); + searchSourceBuilder.size(1); + SearchRequest searchRequest = new SearchRequest(); + searchRequest.indices(CorrelationIndices.CORRELATION_INDEX); + searchRequest.source(searchSourceBuilder); + + client.search(searchRequest, new ActionListener<>() { + @Override + public void onResponse(SearchResponse response) { + MultiSearchRequest mSearchRequest = new MultiSearchRequest(); + SearchHit[] hits = response.getHits().getHits(); + + for (SearchHit hit: hits) { + long counter = hit.getFields().get("counter").getValue(); + float[] query = new float[101]; + for (int i = 0; i < 100; ++i) { + query[i] = (2.0f * ((float) counter) - 50.0f) / 2.0f; + } + query[100] = Long.valueOf((findingTimestamp - scoreTimestamp) / 1000L).floatValue(); + + CorrelationQueryBuilder correlationQueryBuilder = new CorrelationQueryBuilder("corr_vector", query, noOfNearbyFindings, QueryBuilders.boolQuery() + .mustNot(QueryBuilders.matchQuery( + "finding1", "" + )).mustNot(QueryBuilders.matchQuery( + "finding2", "" + )).filter(QueryBuilders.rangeQuery("timestamp") + .gte(findingTimestamp - timeWindow) + .lte(findingTimestamp + timeWindow))); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.query(correlationQueryBuilder); + searchSourceBuilder.fetchSource(true); + searchSourceBuilder.size(noOfNearbyFindings); + SearchRequest searchRequest = new SearchRequest(); + searchRequest.indices(CorrelationIndices.CORRELATION_INDEX); + searchRequest.source(searchSourceBuilder); + + mSearchRequest.add(searchRequest); + } + + client.multiSearch(mSearchRequest, new ActionListener<>() { + @Override + public void onResponse(MultiSearchResponse items) { + MultiSearchResponse.Item[] responses = items.getResponses(); + Map, Pair>> correlatedFindings = new HashMap<>(); + + for (MultiSearchResponse.Item response : responses) { + if (response.isFailure()) { + log.info(response.getFailureMessage()); + continue; + } + + SearchHit[] hits = response.getResponse().getHits().getHits(); + for (SearchHit hit: hits) { + Map source = hit.getSourceAsMap(); + if (!source.get("finding1").toString().equals(findingId)) { + Pair findingKey1 = Pair.of(source.get("finding1").toString(), source.get("logType").toString().split("-")[0]); + + if (correlatedFindings.containsKey(findingKey1)) { + double score = Math.max(correlatedFindings.get(findingKey1).getLeft(), hit.getScore()); + Set rules = correlatedFindings.get(findingKey1).getRight(); + rules.addAll((List) source.get("corrRules")); + + correlatedFindings.put(findingKey1, Pair.of(score, rules)); + } else { + Set rules = new HashSet<>((List) source.get("corrRules")); + correlatedFindings.put(findingKey1, Pair.of((double) hit.getScore(), rules)); + } + } + if (!source.get("finding2").toString().equals(findingId)) { + Pair findingKey2 = Pair.of(source.get("finding2").toString(), source.get("logType").toString().split("-")[1]); + + if (correlatedFindings.containsKey(findingKey2)) { + double score = Math.max(correlatedFindings.get(findingKey2).getLeft(), hit.getScore()); + Set rules = correlatedFindings.get(findingKey2).getRight(); + rules.addAll((List) source.get("corrRules")); + + correlatedFindings.put(findingKey2, Pair.of(score, rules)); + } else { + Set rules = new HashSet<>((List) source.get("corrRules")); + correlatedFindings.put(findingKey2, Pair.of((double) hit.getScore(), rules)); + } + } + } + } + + List findingWithScores = new ArrayList<>(); + for (Map.Entry, Pair>> correlatedFinding: correlatedFindings.entrySet()) { + findingWithScores.add(new FindingWithScore(correlatedFinding.getKey().getKey(), correlatedFinding.getKey().getValue(), + correlatedFinding.getValue().getLeft(), new ArrayList<>(correlatedFinding.getValue().getRight()))); + } + + onOperation(new CorrelatedFindingResponse(findingWithScores)); + } + + @Override + public void onFailure(Exception e) { + onFailures(e); + } + }); + } + + @Override + public void onFailure(Exception e) { + onFailures(e); + } + }); + } + + @Override + public void onFailure(Exception e) { + onFailures(e); + } + }); + } + + @Override + public void onFailure(Exception e) { + onFailures(e); + } + }); + + } + + private void onOperation(CorrelatedFindingResponse response) { + this.response.set(response); + if (counter.compareAndSet(false, true)) { + finishHim(response, null); + } + } + + private void onFailures(Exception t) { + if (counter.compareAndSet(false, true)) { + finishHim(null, t); + } + } + + private void finishHim(CorrelatedFindingResponse response, Exception t) { + threadPool.executor(ThreadPool.Names.GENERIC).execute(ActionRunnable.supply(listener, () -> { + if (t != null) { + if (t instanceof OpenSearchStatusException) { + throw t; + } + throw SecurityAnalyticsException.wrap(t); + } else { + return response; + } + })); + } + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportSearchCorrelationRuleAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportSearchCorrelationRuleAction.java new file mode 100644 index 000000000..4affd451f --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportSearchCorrelationRuleAction.java @@ -0,0 +1,107 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.securityanalytics.transport; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.search.TotalHits; +import org.opensearch.action.ActionListener; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.search.ShardSearchFailure; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.HandledTransportAction; +import org.opensearch.client.Client; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.inject.Inject; +import org.opensearch.commons.notifications.action.SendNotificationRequest; +import org.opensearch.index.IndexNotFoundException; +import org.opensearch.search.SearchHit; +import org.opensearch.search.SearchHits; +import org.opensearch.search.internal.InternalSearchResponse; +import org.opensearch.securityanalytics.action.SearchCorrelationRuleAction; +import org.opensearch.securityanalytics.action.SearchCorrelationRuleRequest; +import org.opensearch.securityanalytics.util.CorrelationRuleIndices; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; +import org.opensearch.tasks.Task; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +public class TransportSearchCorrelationRuleAction extends HandledTransportAction { + + private static final Logger log = LogManager.getLogger(TransportSearchCorrelationRuleAction.class); + + private final Client client; + + private final CorrelationRuleIndices correlationRuleIndices; + + private final ClusterService clusterService; + + private final ThreadPool threadPool; + + private static final SearchResponse EMPTY_SEARCH_RESPONSE = new SearchResponse( + new InternalSearchResponse( + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0f), + null, + null, + null, + false, + null, + 1 + ), + null, + 0, + 0, + 0, + 0, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ); + + + @Inject + public TransportSearchCorrelationRuleAction( + TransportService transportService, + Client client, + ActionFilters actionFilters, + ClusterService clusterService, + ThreadPool threadPool, + CorrelationRuleIndices correlationRuleIndices + ) { + super(SearchCorrelationRuleAction.NAME, transportService, actionFilters, SearchCorrelationRuleRequest::new); + this.client = client; + this.clusterService = clusterService; + this.correlationRuleIndices = correlationRuleIndices; + this.threadPool = threadPool; + } + + @Override + protected void doExecute(Task task, SearchCorrelationRuleRequest request, ActionListener listener) { + this.threadPool.getThreadContext().stashContext(); + + client.search( + request.getSearchRequest(), + new ActionListener<>() { + @Override + public void onResponse(SearchResponse response) { + listener.onResponse(response); + } + + @Override + public void onFailure(Exception e) { + if (e instanceof IndexNotFoundException) { + listener.onResponse(EMPTY_SEARCH_RESPONSE); + } else { + listener.onFailure(SecurityAnalyticsException.wrap(e)); + } + } + } + ); + } + +} diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportSearchDetectorAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportSearchDetectorAction.java index e669a0547..a46952cc6 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportSearchDetectorAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportSearchDetectorAction.java @@ -7,45 +7,95 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.lucene.search.TotalHits; +import org.opensearch.OpenSearchStatusException; import org.opensearch.action.ActionListener; import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.search.SearchResponseSections; +import org.opensearch.action.search.ShardSearchFailure; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; +import org.opensearch.commons.authuser.User; import org.opensearch.client.Client; import org.opensearch.common.inject.Inject; import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.settings.Settings; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.rest.RestStatus; -import org.opensearch.common.xcontent.NamedXContentRegistry; - +import org.opensearch.search.SearchHit; +import org.opensearch.search.SearchHits; +import org.opensearch.search.aggregations.InternalAggregations; +import org.opensearch.search.internal.InternalSearchResponse; +import org.opensearch.search.profile.SearchProfileShardResults; +import org.opensearch.search.suggest.Suggest; import org.opensearch.securityanalytics.action.SearchDetectorAction; import org.opensearch.securityanalytics.action.SearchDetectorRequest; +import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; +import org.opensearch.securityanalytics.util.DetectorIndices; +import org.opensearch.threadpool.ThreadPool; import org.opensearch.tasks.Task; import org.opensearch.transport.TransportService; +import java.util.Collections; +import java.util.Locale; + import static org.opensearch.rest.RestStatus.OK; +import static org.opensearch.securityanalytics.util.DetectorUtils.getEmptySearchResponse; -public class TransportSearchDetectorAction extends HandledTransportAction { +public class TransportSearchDetectorAction extends HandledTransportAction implements SecureTransportAction { private final Client client; private final NamedXContentRegistry xContentRegistry; + private final ClusterService clusterService; + + private final DetectorIndices detectorIndices; + + private final Settings settings; + + private final ThreadPool threadPool; + + private volatile Boolean filterByEnabled; + private static final Logger log = LogManager.getLogger(TransportSearchDetectorAction.class); @Inject - public TransportSearchDetectorAction(TransportService transportService, ActionFilters actionFilters, NamedXContentRegistry xContentRegistry, Client client) { + public TransportSearchDetectorAction(TransportService transportService, ClusterService clusterService, DetectorIndices detectorIndices, ActionFilters actionFilters, NamedXContentRegistry xContentRegistry, Settings settings, Client client) { super(SearchDetectorAction.NAME, transportService, actionFilters, SearchDetectorRequest::new); this.xContentRegistry = xContentRegistry; this.client = client; + this.detectorIndices = detectorIndices; + this.clusterService = clusterService; + this.threadPool = this.detectorIndices.getThreadPool(); + this.settings = settings; + + this.filterByEnabled = SecurityAnalyticsSettings.FILTER_BY_BACKEND_ROLES.get(this.settings); + + this.clusterService.getClusterSettings().addSettingsUpdateConsumer(SecurityAnalyticsSettings.FILTER_BY_BACKEND_ROLES, this::setFilterByEnabled); } @Override protected void doExecute(Task task, SearchDetectorRequest searchDetectorRequest, ActionListener actionListener) { + User user = readUserFromThreadContext(this.threadPool); + + if (doFilterForUser(user, this.filterByEnabled)) { + // security is enabled and filterby is enabled + log.info("Filtering result by: {}", user.getBackendRoles()); + addFilter(user, searchDetectorRequest.searchRequest().source(), "detector.user.backend_roles.keyword"); + } + + this.threadPool.getThreadContext().stashContext(); + if (!detectorIndices.detectorIndexExists()) { + actionListener.onResponse(getEmptySearchResponse()); + return; + } client.search(searchDetectorRequest.searchRequest(), new ActionListener<>() { @Override public void onResponse(SearchResponse response) { @@ -59,4 +109,8 @@ public void onFailure(Exception e) { }); } + private void setFilterByEnabled(boolean filterByEnabled) { + this.filterByEnabled = filterByEnabled; + } + } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportSearchRuleAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportSearchRuleAction.java index 94dc97465..4f5f4c4d1 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportSearchRuleAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportSearchRuleAction.java @@ -13,6 +13,7 @@ import org.opensearch.action.bulk.BulkResponse; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.search.ShardSearchFailure; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; import org.opensearch.action.support.WriteRequest; @@ -24,6 +25,7 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.index.reindex.BulkByScrollResponse; import org.opensearch.rest.RestStatus; +import org.opensearch.search.internal.InternalSearchResponse; import org.opensearch.securityanalytics.action.SearchRuleAction; import org.opensearch.securityanalytics.action.SearchRuleRequest; import org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings; @@ -47,7 +49,6 @@ public class TransportSearchRuleAction extends HandledTransportAction() { @@ -189,7 +191,16 @@ public void onFailure(Exception e) { if (ruleIndices.ruleIndexExists(false)) { search(request.getSearchRequest()); } else { - onFailures(new IllegalArgumentException("Custom rule index doesnt exist. Please create custom rules first.")); + this.listener.onResponse(new SearchResponse( + InternalSearchResponse.empty(), + null, + 1, + 1, + 0, + 1, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + )); } } } diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportUpdateIndexMappingsAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportUpdateIndexMappingsAction.java index 3a72981d5..3717e24b2 100644 --- a/src/main/java/org/opensearch/securityanalytics/transport/TransportUpdateIndexMappingsAction.java +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportUpdateIndexMappingsAction.java @@ -4,6 +4,7 @@ */ package org.opensearch.securityanalytics.transport; +import org.opensearch.OpenSearchStatusException; import org.opensearch.action.ActionListener; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; @@ -11,10 +12,13 @@ import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.inject.Inject; +import org.opensearch.rest.RestStatus; import org.opensearch.securityanalytics.action.UpdateIndexMappingsAction; import org.opensearch.securityanalytics.mapper.MapperService; import org.opensearch.securityanalytics.action.UpdateIndexMappingsRequest; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; import org.opensearch.tasks.Task; +import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; import java.io.IOException; @@ -24,10 +28,13 @@ public class TransportUpdateIndexMappingsAction extends HandledTransportAction actionListener) { + this.threadPool.getThreadContext().stashContext(); try { IndexMetadata index = clusterService.state().metadata().index(request.getIndexName()); if (index == null) { - actionListener.onFailure(new IllegalStateException("Could not find index [" + request.getIndexName() + "]")); + actionListener.onFailure( + SecurityAnalyticsException.wrap( + new OpenSearchStatusException( + "Could not find index [" + request.getIndexName() + "]", RestStatus.NOT_FOUND + ) + ) + ); return; } mapperService.updateMappingAction( diff --git a/src/main/java/org/opensearch/securityanalytics/transport/TransportValidateRulesAction.java b/src/main/java/org/opensearch/securityanalytics/transport/TransportValidateRulesAction.java new file mode 100644 index 000000000..2023c25bc --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/transport/TransportValidateRulesAction.java @@ -0,0 +1,64 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.transport; + +import java.util.List; +import org.opensearch.OpenSearchStatusException; +import org.opensearch.action.ActionListener; +import org.opensearch.action.StepListener; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.HandledTransportAction; +import org.opensearch.client.Client; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.inject.Inject; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.rest.RestStatus; +import org.opensearch.securityanalytics.action.ValidateRulesAction; +import org.opensearch.securityanalytics.action.ValidateRulesRequest; +import org.opensearch.securityanalytics.action.ValidateRulesResponse; +import org.opensearch.securityanalytics.util.RuleValidator; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; +import org.opensearch.tasks.Task; +import org.opensearch.transport.TransportService; + +public class TransportValidateRulesAction extends HandledTransportAction { + + private final RuleValidator ruleValidator; + private final ClusterService clusterService; + + @Inject + public TransportValidateRulesAction( + TransportService transportService, + ActionFilters actionFilters, + ClusterService clusterService, + Client client, + NamedXContentRegistry namedXContentRegistry + ) { + super(ValidateRulesAction.NAME, transportService, actionFilters, ValidateRulesRequest::new); + this.clusterService = clusterService; + this.ruleValidator = new RuleValidator(client, namedXContentRegistry); + } + + @Override + protected void doExecute(Task task, ValidateRulesRequest request, ActionListener actionListener) { + IndexMetadata index = clusterService.state().metadata().index(request.getIndexName()); + if (index == null) { + actionListener.onFailure( + SecurityAnalyticsException.wrap( + new OpenSearchStatusException( + "Could not find index [" + request.getIndexName() + "]", RestStatus.NOT_FOUND + ) + ) + ); + return; + } + StepListener> validateRulesResponseListener = new StepListener(); + validateRulesResponseListener.whenComplete(validateRulesResponse -> { + actionListener.onResponse(new ValidateRulesResponse(validateRulesResponse)); + }, actionListener::onFailure); + ruleValidator.validateCustomRules(request.getRules(), request.getIndexName(), validateRulesResponseListener); + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/util/CorrelationIndices.java b/src/main/java/org/opensearch/securityanalytics/util/CorrelationIndices.java new file mode 100644 index 000000000..a68064853 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/util/CorrelationIndices.java @@ -0,0 +1,115 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.util; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.OpenSearchStatusException; +import org.opensearch.action.ActionListener; +import org.opensearch.action.admin.indices.create.CreateIndexRequest; +import org.opensearch.action.admin.indices.create.CreateIndexResponse; +import org.opensearch.action.bulk.BulkRequest; +import org.opensearch.action.bulk.BulkResponse; +import org.opensearch.action.index.IndexRequest; +import org.opensearch.action.support.WriteRequest; +import org.opensearch.client.Client; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.health.ClusterIndexHealth; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.routing.IndexRoutingTable; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.rest.RestStatus; + +import java.io.IOException; +import java.nio.charset.Charset; +import java.util.Objects; + +public class CorrelationIndices { + + private static final Logger log = LogManager.getLogger(CorrelationIndices.class); + public static final String CORRELATION_INDEX = ".opensearch-sap-correlation-history"; + public static final long FIXED_HISTORICAL_INTERVAL = 24L * 60L * 60L * 20L * 1000L; + + private final Client client; + + private final ClusterService clusterService; + + public CorrelationIndices(Client client, ClusterService clusterService) { + this.client = client; + this.clusterService = clusterService; + } + + public static String correlationMappings() throws IOException { + return new String(Objects.requireNonNull(CorrelationIndices.class.getClassLoader().getResourceAsStream("mappings/correlation.json")).readAllBytes(), Charset.defaultCharset()); + } + + public void initCorrelationIndex(ActionListener actionListener) throws IOException { + if (!correlationIndexExists()) { + CreateIndexRequest indexRequest = new CreateIndexRequest(CORRELATION_INDEX) + .mapping(correlationMappings()) + .settings(Settings.builder().put("index.hidden", true).put("index.correlation", true).build()); + client.admin().indices().create(indexRequest, actionListener); + } + } + + public boolean correlationIndexExists() { + ClusterState clusterState = clusterService.state(); + return clusterState.getRoutingTable().hasIndex(CORRELATION_INDEX); + } + + public void setupCorrelationIndex(TimeValue indexTimeout, Long setupTimestamp, ActionListener listener) { + try { + long currentTimestamp = System.currentTimeMillis(); + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + builder.field("root", true); + builder.field("counter", 0L); + builder.field("finding1", ""); + builder.field("finding2", ""); + builder.field("logType", ""); + builder.field("timestamp", currentTimestamp); + builder.field("scoreTimestamp", 0L); + builder.endObject(); + + IndexRequest indexRequest = new IndexRequest(CorrelationIndices.CORRELATION_INDEX) + .source(builder) + .timeout(indexTimeout); + + XContentBuilder scoreBuilder = XContentFactory.jsonBuilder().startObject(); + scoreBuilder.field("scoreTimestamp", setupTimestamp); + scoreBuilder.field("root", false); + scoreBuilder.endObject(); + + IndexRequest scoreIndexRequest = new IndexRequest(CorrelationIndices.CORRELATION_INDEX) + .source(scoreBuilder) + .timeout(indexTimeout); + + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(indexRequest); + bulkRequest.add(scoreIndexRequest); + bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + + client.bulk(bulkRequest, listener); + } catch (IOException ex) { + log.error(ex); + } + } + + public ClusterIndexHealth correlationIndexHealth() { + ClusterIndexHealth indexHealth = null; + + if (correlationIndexExists()) { + IndexRoutingTable indexRoutingTable = clusterService.state().routingTable().index(CORRELATION_INDEX); + IndexMetadata indexMetadata = clusterService.state().metadata().index(CORRELATION_INDEX); + + indexHealth = new ClusterIndexHealth(indexMetadata, indexRoutingTable); + } + return indexHealth; + } +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/util/CorrelationRuleIndices.java b/src/main/java/org/opensearch/securityanalytics/util/CorrelationRuleIndices.java new file mode 100644 index 000000000..69eb83bc7 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/util/CorrelationRuleIndices.java @@ -0,0 +1,59 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.securityanalytics.util; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.action.ActionListener; +import org.opensearch.action.admin.indices.create.CreateIndexRequest; +import org.opensearch.action.admin.indices.create.CreateIndexResponse; +import org.opensearch.client.Client; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.Settings; + +import java.io.IOException; +import java.nio.charset.Charset; +import java.util.Objects; +import org.opensearch.securityanalytics.model.CorrelationRule; + +public class CorrelationRuleIndices { + private static final Logger log = LogManager.getLogger(CorrelationRuleIndices.class); + + private final Client client; + + private final ClusterService clusterService; + + public CorrelationRuleIndices(Client client, ClusterService clusterService) { + this.client = client; + this.clusterService = clusterService; + } + + public static String correlationRuleIndexMappings() throws IOException { + return new String( + Objects.requireNonNull(CorrelationRuleIndices.class.getClassLoader().getResourceAsStream("mappings/correlation-rules.json")) + .readAllBytes(), + Charset.defaultCharset() + ); + } + + public void initCorrelationRuleIndex(ActionListener actionListener) throws IOException { + if (!correlationRuleIndexExists()) { + CreateIndexRequest indexRequest = new CreateIndexRequest(CorrelationRule.CORRELATION_RULE_INDEX).mapping( + correlationRuleIndexMappings() + ).settings(Settings.builder().put("index.hidden", true).build()); + client.admin().indices().create(indexRequest, actionListener); + } + } + + public boolean correlationRuleIndexExists() { + ClusterState clusterState = clusterService.state(); + return clusterState.getRoutingTable().hasIndex(CorrelationRule.CORRELATION_RULE_INDEX); + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/util/DetectorUtils.java b/src/main/java/org/opensearch/securityanalytics/util/DetectorUtils.java index d28e242b6..4a60a8fcb 100644 --- a/src/main/java/org/opensearch/securityanalytics/util/DetectorUtils.java +++ b/src/main/java/org/opensearch/securityanalytics/util/DetectorUtils.java @@ -4,27 +4,49 @@ */ package org.opensearch.securityanalytics.util; +import org.apache.lucene.search.TotalHits; +import org.opensearch.action.ActionListener; +import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; -import org.opensearch.common.bytes.BytesReference; +import org.opensearch.action.search.ShardSearchFailure; +import org.opensearch.client.Client; import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.search.SearchHit; +import org.opensearch.search.SearchHits; +import org.opensearch.search.aggregations.InternalAggregations; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.search.fetch.subphase.FetchSourceContext; +import org.opensearch.search.internal.InternalSearchResponse; +import org.opensearch.search.profile.SearchProfileShardResults; +import org.opensearch.search.suggest.Suggest; import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.model.DetectorInput; import java.io.IOException; +import java.util.Collections; +import java.util.HashSet; import java.util.LinkedList; import java.util.List; +import java.util.Set; public class DetectorUtils { public static final String DETECTOR_TYPE_PATH = "detector.detector_type"; public static final String DETECTOR_ID_FIELD = "detector_id"; + public static SearchResponse getEmptySearchResponse() { + return new SearchResponse(new InternalSearchResponse( + new SearchHits(new SearchHit[0], new TotalHits(0L, TotalHits.Relation.EQUAL_TO), 0.0f), + InternalAggregations.from(Collections.emptyList()), + new Suggest(Collections.emptyList()), + new SearchProfileShardResults(Collections.emptyMap()), false, false, 0), + "", 0, 0, 0, 0, + ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); + } + public static List getDetectors(SearchResponse response, NamedXContentRegistry xContentRegistry) throws IOException { List detectors = new LinkedList<>(); for (SearchHit hit : response.getHits()) { @@ -36,4 +58,39 @@ public static List getDetectors(SearchResponse response, NamedXContent } return detectors; } + + public static void getAllDetectorInputs(Client client, NamedXContentRegistry xContentRegistry, ActionListener> actionListener) { + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.fetchSource(FetchSourceContext.FETCH_SOURCE); + searchSourceBuilder.seqNoAndPrimaryTerm(true); + searchSourceBuilder.version(true); + + SearchRequest searchRequest = new SearchRequest(); + searchRequest.source(searchSourceBuilder); + searchRequest.indices(Detector.DETECTORS_INDEX); + + client.search(searchRequest, new ActionListener<>() { + @Override + public void onResponse(SearchResponse response) { + Set allDetectorIndices = new HashSet<>(); + try { + List detectors = DetectorUtils.getDetectors(response, xContentRegistry); + for (Detector detector : detectors) { + for (DetectorInput input : detector.getInputs()) { + allDetectorIndices.addAll(input.getIndices()); + } + } + } catch (IOException e) { + actionListener.onFailure(e); + } + actionListener.onResponse(allDetectorIndices); + } + + @Override + public void onFailure(Exception e) { + actionListener.onFailure(e); + } + }); + } } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/util/IndexUtils.java b/src/main/java/org/opensearch/securityanalytics/util/IndexUtils.java index 1bd1ff00f..0280c93c4 100644 --- a/src/main/java/org/opensearch/securityanalytics/util/IndexUtils.java +++ b/src/main/java/org/opensearch/securityanalytics/util/IndexUtils.java @@ -4,16 +4,20 @@ */ package org.opensearch.securityanalytics.util; +import java.util.SortedMap; import org.opensearch.action.ActionListener; import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.opensearch.action.support.IndicesOptions; import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.client.IndicesAdminClient; import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.IndexAbstraction; import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; import java.io.IOException; import java.util.HashMap; @@ -30,6 +34,8 @@ public class IndexUtils { public static Boolean detectorIndexUpdated = false; public static Boolean customRuleIndexUpdated = false; public static Boolean prePackagedRuleIndexUpdated = false; + public static Boolean correlationIndexUpdated = false; + public static Boolean correlationRuleIndexUpdated = false; public static void detectorIndexUpdated() { detectorIndexUpdated = true; @@ -43,6 +49,12 @@ public static void prePackagedRuleIndexUpdated() { prePackagedRuleIndexUpdated = true; } + public static void correlationIndexUpdated() { correlationIndexUpdated = true; } + + public static void correlationRuleIndexUpdated() { + correlationRuleIndexUpdated = true; + } + public static Integer getSchemaVersion(String mapping) throws IOException { XContentParser xcp = XContentType.JSON.xContent().createParser( NamedXContentRegistry.EMPTY, @@ -105,4 +117,57 @@ public static void updateIndexMapping( } } } + + public static boolean isDataStream(String name, ClusterState clusterState) { + return clusterState.getMetadata().dataStreams().containsKey(name); + } + public static boolean isAlias(String indexName, ClusterState clusterState) { + return clusterState.getMetadata().hasAlias(indexName); + } + public static String getWriteIndex(String indexName, ClusterState clusterState) { + if(isAlias(indexName, clusterState) || isDataStream(indexName, clusterState)) { + IndexMetadata metadata = clusterState.getMetadata() + .getIndicesLookup() + .get(indexName).getWriteIndex(); + if (metadata != null) { + return metadata.getIndex().getName(); + } + } + return null; + } + + public static boolean isConcreteIndex(String indexName, ClusterState clusterState) { + IndexAbstraction indexAbstraction = clusterState.getMetadata() + .getIndicesLookup() + .get(indexName); + + if (indexAbstraction != null) { + return indexAbstraction.getType() == IndexAbstraction.Type.CONCRETE_INDEX; + } else { + return false; + } + } + + public static String getNewestIndexByCreationDate(String[] concreteIndices, ClusterState clusterState) { + final SortedMap lookup = clusterState.getMetadata().getIndicesLookup(); + long maxCreationDate = Long.MIN_VALUE; + String newestIndex = null; + for (String indexName : concreteIndices) { + IndexAbstraction index = lookup.get(indexName); + IndexMetadata indexMetadata = clusterState.getMetadata().index(indexName); + if(index != null && index.getType() == IndexAbstraction.Type.CONCRETE_INDEX) { + if (indexMetadata.getCreationDate() > maxCreationDate) { + maxCreationDate = indexMetadata.getCreationDate(); + newestIndex = indexName; + } + } + } + return newestIndex; + } + + public static String getNewIndexByCreationDate(ClusterState state, IndexNameExpressionResolver i, String index) { + String[] strings = i.concreteIndexNames(state, IndicesOptions.LENIENT_EXPAND_OPEN, index); + return getNewestIndexByCreationDate(strings, state); + } + } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/util/RuleIndices.java b/src/main/java/org/opensearch/securityanalytics/util/RuleIndices.java index ad86f69c9..9a51569f0 100644 --- a/src/main/java/org/opensearch/securityanalytics/util/RuleIndices.java +++ b/src/main/java/org/opensearch/securityanalytics/util/RuleIndices.java @@ -4,12 +4,16 @@ */ package org.opensearch.securityanalytics.util; +import java.util.Set; + import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.OpenSearchStatusException; import org.opensearch.action.ActionListener; import org.opensearch.action.admin.indices.create.CreateIndexRequest; import org.opensearch.action.admin.indices.create.CreateIndexResponse; +import org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest; +import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.opensearch.action.bulk.BulkRequest; import org.opensearch.action.bulk.BulkResponse; import org.opensearch.action.index.IndexRequest; @@ -21,19 +25,21 @@ import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.health.ClusterIndexHealth; import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.cluster.routing.IndexRoutingTable; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.io.PathUtils; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.xcontent.ToXContent; import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.reindex.BulkByScrollResponse; import org.opensearch.index.reindex.DeleteByQueryAction; import org.opensearch.index.reindex.DeleteByQueryRequestBuilder; import org.opensearch.rest.RestStatus; import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.securityanalytics.mapper.MapperUtils; import org.opensearch.securityanalytics.model.Detector; import org.opensearch.securityanalytics.model.Rule; import org.opensearch.securityanalytics.rules.backend.OSQueryBackend; @@ -87,9 +93,13 @@ public static String ruleMappings() throws IOException { public void initRuleIndex(ActionListener actionListener, boolean isPrepackaged) throws IOException { if (!ruleIndexExists(isPrepackaged)) { + Settings indexSettings = Settings.builder() + .put("index.hidden", true) + .put("index.auto_expand_replicas", "0-all") + .build(); CreateIndexRequest indexRequest = new CreateIndexRequest(getRuleIndex(isPrepackaged)) .mapping(ruleMappings()) - .settings(Settings.builder().put("index.hidden", true).build()); + .settings(indexSettings); client.admin().indices().create(indexRequest, actionListener); } } @@ -248,9 +258,7 @@ private void loadQueries(Path path, WriteRequest.RefreshPolicy refreshPolicy, Ti } private String getRuleCategory(Path folderPath) { - String folder = folderPath.toString(); - int idx = folder.lastIndexOf(PathUtils.getDefaultFileSystem().getSeparator()); - return folder.substring(idx+1); + return folderPath.getFileName().toString(); } private void ingestQueries(Map> logIndexToRules, WriteRequest.RefreshPolicy refreshPolicy, TimeValue indexTimeout, ActionListener listener) throws SigmaError, IOException { @@ -281,9 +289,16 @@ private List getQueries(QueryBackend backend, String category, List queries = new ArrayList<>(); for (String ruleStr: rules) { SigmaRule rule = SigmaRule.fromYaml(ruleStr, true); + backend.resetQueryFields(); List ruleQueries = backend.convertRule(rule); - - Rule ruleModel = new Rule(rule.getId().toString(), NO_VERSION, rule, category, ruleQueries.stream().map(Object::toString).collect(Collectors.toList()), ruleStr); + Set queryFieldNames = backend.getQueryFields().keySet(); + + Rule ruleModel = new Rule( + rule.getId().toString(), NO_VERSION, rule, category, + ruleQueries.stream().map(Object::toString).collect(Collectors.toList()), + new ArrayList<>(queryFieldNames), + ruleStr + ); queries.add(ruleModel); } return queries; diff --git a/src/main/java/org/opensearch/securityanalytics/util/RuleTopicIndices.java b/src/main/java/org/opensearch/securityanalytics/util/RuleTopicIndices.java index 06d6e1c46..b866bf2a6 100644 --- a/src/main/java/org/opensearch/securityanalytics/util/RuleTopicIndices.java +++ b/src/main/java/org/opensearch/securityanalytics/util/RuleTopicIndices.java @@ -4,26 +4,24 @@ */ package org.opensearch.securityanalytics.util; +import java.io.IOException; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.action.ActionListener; -import org.opensearch.action.admin.indices.create.CreateIndexRequest; -import org.opensearch.action.admin.indices.create.CreateIndexResponse; -import org.opensearch.action.admin.indices.delete.DeleteIndexRequest; -import org.opensearch.action.search.SearchRequest; -import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.admin.indices.template.put.PutComposableIndexTemplateAction; import org.opensearch.action.support.master.AcknowledgedResponse; -import org.opensearch.client.AdminClient; import org.opensearch.client.Client; import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.ComposableIndexTemplate; +import org.opensearch.cluster.metadata.Template; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentType; -import org.opensearch.search.builder.SearchSourceBuilder; - -import java.io.IOException; -import java.nio.charset.Charset; -import java.util.Objects; +import org.opensearch.securityanalytics.config.monitors.DetectorMonitorConfig; public class RuleTopicIndices { private static final Logger log = LogManager.getLogger(DetectorIndices.class); @@ -37,40 +35,47 @@ public RuleTopicIndices(Client client, ClusterService clusterService) { this.clusterService = clusterService; } - public static String ruleTopicIndexMappings() throws IOException { - return new String(Objects.requireNonNull(DetectorIndices.class.getClassLoader().getResourceAsStream("mappings/detector-queries.json")).readAllBytes(), Charset.defaultCharset()); - } - public static String ruleTopicIndexSettings() throws IOException { return new String(Objects.requireNonNull(DetectorIndices.class.getClassLoader().getResourceAsStream("mappings/detector-settings.json")).readAllBytes(), Charset.defaultCharset()); } - public void initRuleTopicIndex(String ruleTopicIndex, ActionListener actionListener) throws IOException { - if (!ruleTopicIndexExists(ruleTopicIndex)) { - CreateIndexRequest indexRequest = new CreateIndexRequest(ruleTopicIndex) - .mapping(ruleTopicIndexMappings()) - .settings(Settings.builder().loadFromSource(ruleTopicIndexSettings(), XContentType.JSON).build()); - client.admin().indices().create(indexRequest, actionListener); - } else { - actionListener.onResponse(new CreateIndexResponse(true, true, ruleTopicIndex)); - } - } + public void initRuleTopicIndexTemplate(ActionListener actionListener) throws IOException { + if (!ruleTopicIndexTemplateExists()) { + // Compose list of all patterns to cover all query indices + List indexPatterns = new ArrayList<>(); + for(String ruleIndex : DetectorMonitorConfig.getAllRuleIndices()) { + indexPatterns.add(ruleIndex + "*"); + } - public void deleteRuleTopicIndex(String ruleTopicIndex, ActionListener actionListener) throws IOException { - if (ruleTopicIndexExists(ruleTopicIndex)) { - DeleteIndexRequest request = new DeleteIndexRequest(ruleTopicIndex); - client.admin().indices().delete(request, actionListener); - } - } + ComposableIndexTemplate template = new ComposableIndexTemplate( + indexPatterns, + new Template( + Settings.builder().loadFromSource(ruleTopicIndexSettings(), XContentType.JSON).build(), + null, + null + ), + null, + 500L, + null, + null + ); + + client.execute( + PutComposableIndexTemplateAction.INSTANCE, + new PutComposableIndexTemplateAction.Request(DetectorMonitorConfig.OPENSEARCH_SAP_RULE_INDEX_TEMPLATE) + .indexTemplate(template) + .create(true), + actionListener + ); - public void countQueries(String ruleTopicIndex, ActionListener listener) { - SearchRequest request = new SearchRequest(ruleTopicIndex) - .source(new SearchSourceBuilder().size(0)); - client.search(request, listener); + } else { + actionListener.onResponse(new AcknowledgedResponse(true)); + } } - public boolean ruleTopicIndexExists(String ruleTopicIndex) { + public boolean ruleTopicIndexTemplateExists() { ClusterState clusterState = clusterService.state(); - return clusterState.getRoutingTable().hasIndex(ruleTopicIndex); + return clusterState.metadata().templatesV2() + .get(DetectorMonitorConfig.OPENSEARCH_SAP_RULE_INDEX_TEMPLATE) != null; } } \ No newline at end of file diff --git a/src/main/java/org/opensearch/securityanalytics/util/RuleValidator.java b/src/main/java/org/opensearch/securityanalytics/util/RuleValidator.java new file mode 100644 index 000000000..937265908 --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/util/RuleValidator.java @@ -0,0 +1,115 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.util; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import org.opensearch.action.ActionListener; +import org.opensearch.action.StepListener; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.client.Client; +import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.search.SearchHit; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.search.fetch.subphase.FetchSourceContext; +import org.opensearch.securityanalytics.action.GetMappingsViewAction; +import org.opensearch.securityanalytics.action.GetMappingsViewRequest; +import org.opensearch.securityanalytics.action.GetMappingsViewResponse; +import org.opensearch.securityanalytics.action.SearchRuleAction; +import org.opensearch.securityanalytics.action.SearchRuleRequest; +import org.opensearch.securityanalytics.mapper.MapperUtils; +import org.opensearch.securityanalytics.model.Rule; + +public class RuleValidator +{ + private final static int MAX_RULES_TO_VALIDATE = 1000; + + private final static String RULE_ID = "_id"; + + private final Client client; + private final NamedXContentRegistry namedXContentRegistry; + + public RuleValidator(Client client, NamedXContentRegistry namedXContentRegistry) { + this.client = client; + this.namedXContentRegistry = namedXContentRegistry; + } + + public void validateCustomRules(List ruleIds, String indexName, ActionListener> listener) { + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.fetchSource(FetchSourceContext.FETCH_SOURCE); + + QueryBuilder queryBuilder = QueryBuilders.termsQuery( RULE_ID, ruleIds.toArray(new String[]{})); + SearchRequest searchRequest = new SearchRequest(Rule.CUSTOM_RULES_INDEX) + .source(new SearchSourceBuilder() + .seqNoAndPrimaryTerm(false) + .version(false) + .query(queryBuilder) + .fetchSource(FetchSourceContext.FETCH_SOURCE) + .size(MAX_RULES_TO_VALIDATE) + ) + .indices(Rule.CUSTOM_RULES_INDEX); + + StepListener searchRuleResponseListener = new StepListener(); + searchRuleResponseListener.whenComplete(searchRuleResponse -> { + + List rules = getRules(searchRuleResponse, namedXContentRegistry); + validateRules(rules, indexName, listener); + + }, listener::onFailure); + client.execute(SearchRuleAction.INSTANCE, new SearchRuleRequest(false, searchRequest), searchRuleResponseListener); + } + + private void validateRules(List rules, String indexName, ActionListener> listener) { + // Get index mappings + String ruleTopic = rules.get(0).getCategory(); + StepListener getMappingsViewResponseListener = new StepListener(); + getMappingsViewResponseListener.whenComplete(getMappingsViewResponse -> { + + List nonapplicableRuleIds = new ArrayList<>(); + for(Rule r : rules) { + // We will check against all index fields and applicable template aliases too + List allIndexFields = MapperUtils.extractAllFieldsFlat(getMappingsViewResponse.getAliasMappings()); + allIndexFields.addAll(getMappingsViewResponse.getUnmappedIndexFields()); + // check if all rule fields are present in index fields + List missingRuleFields = r.getQueryFieldNames() + .stream() + .map(e -> e.getValue()) + .filter(e -> allIndexFields.contains(e) == false) + .collect(Collectors.toList()); + + if (missingRuleFields.size() > 0) { + nonapplicableRuleIds.add(r.getId()); + } + } + listener.onResponse(nonapplicableRuleIds); + }, listener::onFailure); + client.execute( + GetMappingsViewAction.INSTANCE, + new GetMappingsViewRequest(indexName, ruleTopic), + getMappingsViewResponseListener + ); + } + + public static List getRules(SearchResponse response, NamedXContentRegistry xContentRegistry) throws IOException { + List rules = new ArrayList<>((int) response.getHits().getTotalHits().value); + for (SearchHit hit : response.getHits()) { + XContentParser xcp = XContentType.JSON.xContent().createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, hit.getSourceAsString()); + Rule rule = Rule.docParse(xcp, hit.getId(), hit.getVersion()); + rules.add(rule); + } + return rules; + } +} diff --git a/src/main/java/org/opensearch/securityanalytics/util/SecurityAnalyticsException.java b/src/main/java/org/opensearch/securityanalytics/util/SecurityAnalyticsException.java index 340ccab1c..257fb3e24 100644 --- a/src/main/java/org/opensearch/securityanalytics/util/SecurityAnalyticsException.java +++ b/src/main/java/org/opensearch/securityanalytics/util/SecurityAnalyticsException.java @@ -8,8 +8,8 @@ import org.apache.logging.log4j.Logger; import org.opensearch.OpenSearchException; import org.opensearch.common.Strings; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.rest.RestStatus; import java.io.IOException; @@ -39,7 +39,7 @@ public RestStatus status() { } public static OpenSearchException wrap(Exception ex) { - log.error(String.format(Locale.getDefault(), "Security Analytics error: %s", ex.getMessage())); + log.error("Security Analytics error:", ex); String friendlyMsg = "Unknown error"; RestStatus status = RestStatus.INTERNAL_SERVER_ERROR; @@ -51,6 +51,19 @@ public static OpenSearchException wrap(Exception ex) { return new SecurityAnalyticsException(friendlyMsg, status, new Exception(String.format(Locale.getDefault(), "%s: %s", ex.getClass().getName(), ex.getMessage()))); } + public static OpenSearchException wrap(OpenSearchException ex) { + log.error("Security Analytics error:", ex); + + String friendlyMsg = "Unknown error"; + RestStatus status = ex.status(); + + if (!Strings.isNullOrEmpty(ex.getMessage())) { + friendlyMsg = ex.getMessage(); + } + + return new SecurityAnalyticsException(friendlyMsg, status, new Exception(String.format(Locale.getDefault(), "%s: %s", ex.getClass().getName(), ex.getMessage()))); + } + public static OpenSearchException wrap(List ex) { try { RestStatus status = RestStatus.BAD_REQUEST; @@ -58,10 +71,10 @@ public static OpenSearchException wrap(List ex) { XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); for (Exception e: ex) { builder.field("error", e.getMessage()); + log.error("Security Analytics error:", e); } builder.endObject(); String friendlyMsg = Strings.toString(builder); - log.error(String.format(Locale.getDefault(), "Security Analytics error: %s", friendlyMsg)); return new SecurityAnalyticsException(friendlyMsg, status, new Exception(String.format(Locale.getDefault(), "%s: %s", ex.getClass().getName(), friendlyMsg))); } catch (IOException e) { diff --git a/src/main/java/org/opensearch/securityanalytics/util/XContentUtils.java b/src/main/java/org/opensearch/securityanalytics/util/XContentUtils.java new file mode 100644 index 000000000..1b3c21e8b --- /dev/null +++ b/src/main/java/org/opensearch/securityanalytics/util/XContentUtils.java @@ -0,0 +1,29 @@ +/* +Copyright OpenSearch Contributors +SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.util; + +import java.io.IOException; +import java.util.Map; +import org.opensearch.common.bytes.BytesReference; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.common.xcontent.XContentHelper; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.XContentBuilder; + +public class XContentUtils { + + public static String parseMapToJsonString(Map map) throws IOException { + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + builder.map(map); + return XContentHelper.convertToJson( + BytesReference.bytes(builder), + false, + false, + builder.contentType() + ); + } + +} \ No newline at end of file diff --git a/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec new file mode 100644 index 000000000..89f846a4d --- /dev/null +++ b/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -0,0 +1 @@ +org.opensearch.securityanalytics.correlation.index.codec.correlation950.CorrelationCodec \ No newline at end of file diff --git a/src/main/resources/OSMapping/ad_ldap/fieldmappings.yml b/src/main/resources/OSMapping/ad_ldap/fieldmappings.yml index 9ab79f08f..5ec90ee14 100644 --- a/src/main/resources/OSMapping/ad_ldap/fieldmappings.yml +++ b/src/main/resources/OSMapping/ad_ldap/fieldmappings.yml @@ -1,7 +1,25 @@ # this file provides pre-defined mappings for Sigma fields defined for all Sigma rules under windows log group to their corresponding ECS Fields. fieldmappings: - EventID: event_uid - HiveName: unmapped.HiveName - fieldB: mappedB - fieldA1: mappedA - CommandLine: windows-event_data-CommandLine + TargetUserName: azure-signinlogs-properties-user_id + creationTime: timestamp + Category: azure-activitylogs-category + OperationName: azure-platformlogs-operation_name + ModifiedProperties_NewValue: modified_properties-new_value + ResourceProviderValue: azure-resource-provider + conditionalAccessStatus: azure-signinlogs-properties-conditional_access_status + SearchFilter: SearchFilter + Operation: azure-platformlogs-operation_name + ResultType: azure-platformlogs-result_type + DeviceDetail_isCompliant: azure-signinlogs-properties-device_detail-is_compliant + ResourceDisplayName: resource_display_name + AuthenticationRequirement: azure-signinlogs-properties-authentication_requirement + TargetResources: target_resources + Workload: workload + DeviceDetail.deviceId: azure-signinlogs-properties-device_detail-device_id + OperationNameValue: azure-platformlogs-operation_name + ResourceId: azure-signinlogs-properties-resource_id + ResultDescription: azure-signinlogs-result_description + EventID: EventID + NetworkLocationDetails: azure-signinlogs-properties-network_location_details + CategoryValue: azure-activitylogs-category + ActivityDisplayName: azure-auditlogs-properties-activity_display_name diff --git a/src/main/resources/OSMapping/ad_ldap/mappings.json b/src/main/resources/OSMapping/ad_ldap/mappings.json index ea77f2460..3bd2ae2b5 100644 --- a/src/main/resources/OSMapping/ad_ldap/mappings.json +++ b/src/main/resources/OSMapping/ad_ldap/mappings.json @@ -1,12 +1,84 @@ { "properties": { - "windows-event_data-CommandLine": { - "type": "alias", - "path": "CommandLine" + "azure-signinlogs-properties-user_id": { + "path": "azure.signinlogs.props.user_id", + "type": "alias" }, - "event_uid": { - "type": "alias", - "path": "EventID" + "azure-activitylogs-category": { + "path": "azure.activitylogs.category", + "type": "alias" + }, + "azure-platformlogs-operation_name": { + "path": "azure.platformlogs.operation_name", + "type": "alias" + }, + "modified_properties-new_value": { + "path": "modified_properties.new_value", + "type": "alias" + }, + "azure-resource-provider": { + "path": "azure.resource.provider", + "type": "alias" + }, + "azure-signinlogs-properties-conditional_access_status": { + "path": "azure.signinlogs.props.conditional_access_status", + "type": "alias" + }, + "SearchFilter": { + "path": "SearchFilter", + "type": "alias" + }, + "azure-platformlogs-result_type": { + "path": "azure.platformlogs.result_type", + "type": "alias" + }, + "azure-signinlogs-properties-device_detail-is_compliant": { + "path": "azure.signinlogs.props.device_detail.is_compliant", + "type": "alias" + }, + "ResourceDisplayName": { + "path": "ResourceDisplayName", + "type": "alias" + }, + "azure-signinlogs-properties-authentication_requirement": { + "path": "azure.signinlogs.props.authentication_requirement", + "type": "alias" + }, + "TargetResources": { + "path": "TargetResources", + "type": "alias" + }, + "Workload": { + "path": "Workload", + "type": "alias" + }, + "azure-signinlogs-properties-device_detail-device_id": { + "path": "azure.signinlogs.props.device_detail.device_id", + "type": "alias" + }, + "azure-signinlogs-properties-resource_id": { + "path": "azure.signinlogs.props.resource_id", + "type": "alias" + }, + "azure-signinlogs-result_description": { + "path": "azure.signinlogs.result_description", + "type": "alias" + }, + "EventID": { + "path": "EventID", + "type": "alias" + }, + "azure-signinlogs-properties-network_location_details": { + "path": "azure.signinlogs.props.network_location_details", + "type": "alias" + }, + "azure-auditlogs-properties-activity_display_name": { + "path": "azure.auditlogs.props.activity_display_name", + "type": "alias" + }, + "timestamp": { + "path": "@timestamp", + "type": "alias" } } } \ No newline at end of file diff --git a/src/main/resources/OSMapping/apache_access/fieldmappings.yml b/src/main/resources/OSMapping/apache_access/fieldmappings.yml index 9ab79f08f..9f649aeee 100644 --- a/src/main/resources/OSMapping/apache_access/fieldmappings.yml +++ b/src/main/resources/OSMapping/apache_access/fieldmappings.yml @@ -5,3 +5,4 @@ fieldmappings: fieldB: mappedB fieldA1: mappedA CommandLine: windows-event_data-CommandLine + creationTime: timestamp diff --git a/src/main/resources/OSMapping/apache_access/mappings.json b/src/main/resources/OSMapping/apache_access/mappings.json index ea77f2460..dc7fc31b5 100644 --- a/src/main/resources/OSMapping/apache_access/mappings.json +++ b/src/main/resources/OSMapping/apache_access/mappings.json @@ -7,6 +7,10 @@ "event_uid": { "type": "alias", "path": "EventID" + }, + "timestamp": { + "path": "creationTime", + "type": "alias" } } } \ No newline at end of file diff --git a/src/main/resources/OSMapping/azure/fieldmappings.yml b/src/main/resources/OSMapping/azure/fieldmappings.yml new file mode 100644 index 000000000..91a4c4407 --- /dev/null +++ b/src/main/resources/OSMapping/azure/fieldmappings.yml @@ -0,0 +1,31 @@ +fieldmappings: + Resultdescription: azure-signinlogs-result_description + eventSource: eventSource + eventName: eventName + Status: azure-platformlogs-status + LoggedByService: azure-auditlogs-properties-logged_by_service + properties_message: properties_message + status: azure-platformlogs-status + TargetUserName: azure-signinlogs-properties-user_id + creationTime: timestamp + Category: azure-activitylogs-category + OperationName: azure-platformlogs-operation_name + ModifiedProperties_NewValue: modified_properties-new_value + ResourceProviderValue: azure-resource-provider + conditionalAccessStatus: azure-signinlogs-properties-conditional_access_status + SearchFilter: search_filter + Operation: azure-platformlogs-operation_name + ResultType: azure-platformlogs-result_type + DeviceDetail_isCompliant: azure-signinlogs-properties-device_detail-is_compliant + ResourceDisplayName: resource_display_name + AuthenticationRequirement: azure-signinlogs-properties-authentication_requirement + TargetResources: target_resources + Workload: Workload + DeviceDetail_deviceId: azure-signinlogs-properties-device_detail-device_id + OperationNameValue: azure-platformlogs-operation_name + ResourceId: azure-signinlogs-properties-resource_id + ResultDescription: azure-signinlogs-result-description + EventID: EventID + NetworkLocationDetails: azure-signinlogs-properties-network_location_details + CategoryValue: azure-activitylogs-category + ActivityDisplayName: azure-auditlogs-properties-activity_display_name diff --git a/src/main/resources/OSMapping/azure/mappings.json b/src/main/resources/OSMapping/azure/mappings.json new file mode 100644 index 000000000..003f29d73 --- /dev/null +++ b/src/main/resources/OSMapping/azure/mappings.json @@ -0,0 +1,104 @@ +{ + "properties": { + "azure-signinlogs-properties-user_id": { + "path": "azure.signinlogs.props.user_id", + "type": "alias" + }, + "azure-activitylogs-category": { + "path": "azure.activitylogs.category", + "type": "alias" + }, + "azure-platformlogs-operation_name": { + "path": "azure.platformlogs.operation_name", + "type": "alias" + }, + "modified_properties-new_value": { + "path": "modified_properties.new_value", + "type": "alias" + }, + "azure-resource-provider": { + "path": "azure.resource.provider", + "type": "alias" + }, + "azure-signinlogs-properties-conditional_access_status": { + "path": "azure.signinlogs.props.conditional_access_status", + "type": "alias" + }, + "SearchFilter": { + "path": "SearchFilter", + "type": "alias" + }, + "azure-platformlogs-result_type": { + "path": "azure.platformlogs.result_type", + "type": "alias" + }, + "azure-signinlogs-properties-device_detail-is_compliant": { + "path": "azure.signinlogs.props.device_detail.is_compliant", + "type": "alias" + }, + "ResourceDisplayName": { + "path": "ResourceDisplayName", + "type": "alias" + }, + "azure-signinlogs-properties-authentication_requirement": { + "path": "azure.signinlogs.props.authentication_requirement", + "type": "alias" + }, + "TargetResources": { + "path": "TargetResources", + "type": "alias" + }, + "Workload": { + "path": "Workload", + "type": "alias" + }, + "azure-signinlogs-properties-device_detail-device_id": { + "path": "azure.signinlogs.props.device_detail.device_id", + "type": "alias" + }, + "azure-signinlogs-properties-resource_id": { + "path": "azure.signinlogs.props.resource_id", + "type": "alias" + }, + "EventID": { + "path": "EventID", + "type": "alias" + }, + "azure-signinlogs-properties-network_location_details": { + "path": "azure.signinlogs.props.network_location_details", + "type": "alias" + }, + "azure-auditlogs-properties-activity_display_name": { + "path": "azure.auditlogs.props.activity_display_name", + "type": "alias" + }, + "azure-signinlogs-result-description": { + "path": "azure.signinlogs.result-description", + "type": "alias" + }, + "eventSource": { + "path": "eventSource", + "type": "alias" + }, + "eventName": { + "path": "eventName", + "type": "alias" + }, + "azure-platformlogs-status": { + "path": "azure.platformlogs.status", + "type": "alias" + }, + "azure-auditlogs-properties-logged_by_service": { + "path": "azure.auditlogs.props.logged_by_service", + "type": "alias" + }, + "properties_message": { + "path": "properties_message", + "type": "alias" + }, + "timestamp": { + "path": "@timestamp", + "type": "alias" + } + } +} \ No newline at end of file diff --git a/src/main/resources/OSMapping/cloudtrail/fieldmappings.yml b/src/main/resources/OSMapping/cloudtrail/fieldmappings.yml index 9ab79f08f..68f459e50 100644 --- a/src/main/resources/OSMapping/cloudtrail/fieldmappings.yml +++ b/src/main/resources/OSMapping/cloudtrail/fieldmappings.yml @@ -1,7 +1,17 @@ -# this file provides pre-defined mappings for Sigma fields defined for all Sigma rules under windows log group to their corresponding ECS Fields. fieldmappings: - EventID: event_uid - HiveName: unmapped.HiveName - fieldB: mappedB - fieldA1: mappedA - CommandLine: windows-event_data-CommandLine + eventName: aws-cloudtrail-event_name + eventSource: aws-cloudtrail-event_source + eventType: aws-cloudtrail-event_type + errorMessage: aws-cloudtrail-error_message + errorCode: aws-cloudtrail-error_code + responseElements: aws-cloudtrail-response_elements-text + responseElements.pendingModifiedValues.masterUserPassword: aws-cloudtrail-response_elements-pending_modified_values-master_user_password + responseElements.publiclyAccessible: aws-cloudtrail-response_elements-publicly_accessible + requestParameters.arn: aws-cloudtrail-request_parameters-arn + requestParameters.attribute: aws-cloudtrail-request_parameters-attribute + requestParameters.userName: aws-cloudtrail-request_parameters-username + requestParameters.containerDefinitions.command: aws-cloudtrail-request_parameters-container_definitions-command + userIdentity.type: aws-cloudtrail-user_identity-type + userIdentity.arn: aws-cloudtrail-user_identity-arn + userIdentity.sessionContext.sessionIssuer.type: aws-cloudtrail-user_identity-session_context-session_issuer-type + eventTime: timestamp \ No newline at end of file diff --git a/src/main/resources/OSMapping/cloudtrail/mappings.json b/src/main/resources/OSMapping/cloudtrail/mappings.json index ea77f2460..ec730def0 100644 --- a/src/main/resources/OSMapping/cloudtrail/mappings.json +++ b/src/main/resources/OSMapping/cloudtrail/mappings.json @@ -1,12 +1,120 @@ { "properties": { - "windows-event_data-CommandLine": { + "cloud.account.id": { "type": "alias", - "path": "CommandLine" + "path": "aws.cloudtrail.recipient_account_id" }, - "event_uid": { + "cloud.region": { "type": "alias", - "path": "EventID" + "path": "region_id" + }, + "source.geo.country_iso_code": { + "type": "alias", + "path": "src_country_iso_code" + }, + "source.geo.country_name": { + "type": "alias", + "path": "src_geo_country_name" + }, + "source.as.organization.name": { + "type": "alias", + "path": "src_as_org_name" + }, + "source.ip": { + "type": "alias", + "path": "src_ip" + }, + "userIdentity.arn": { + "type": "alias", + "path": "aws.cloudtrail.user_identity.arn" + }, + "eventName": { + "type": "alias", + "path": "aws.cloudtrail.eventName" + }, + "eventType": { + "type": "alias", + "path": "aws.cloudtrail.eventType" + }, + "errorCode": { + "type": "alias", + "path": "aws.cloudtrail.error_code" + }, + "eventSource": { + "type": "alias", + "path": "aws.cloudtrail.eventType" + }, + "tlsDetails.tlsVersion": { + "type": "alias", + "path": "tls_version" + }, + "user_agent.name": { + "type": "alias", + "path": "user_agent" + }, + "threat.matched.providers": { + "type": "alias", + "path": "thread_matched_providers" + }, + "aws-cloudtrail-event_name": { + "type": "alias", + "path": "aws.cloudtrail.event_name" + }, + "aws-cloudtrail-event_source": { + "type": "alias", + "path": "aws.cloudtrail.event_source" + }, + "aws-cloudtrail-event_type": { + "type": "alias", + "path": "aws.cloudtrail.event_type" + }, + "aws-cloudtrail-error_message": { + "type": "alias", + "path": "aws.cloudtrail.error_message" + }, + "aws-cloudtrail-error_code": { + "type": "alias", + "path": "aws.cloudtrail.error_code" + }, + "aws-cloudtrail-response_elements-text": { + "type": "alias", + "path": "aws.cloudtrail.response_elements.text" + }, + "aws-cloudtrail-response_elements-pending_modified_values-master_user_password": { + "type": "alias", + "path": "aws.cloudtrail.response_elements.pending_modified_values.master_user_password" + }, + "aws-cloudtrail-response_elements-publicly_accessible": { + "type": "alias", + "path": "aws.cloudtrail.response_elements.publicly_accessible" + }, + "aws-cloudtrail-request_parameters-arn": { + "type": "alias", + "path": "aws.cloudtrail.request_parameters.arn" + }, + "aws-cloudtrail-request_parameters-attribute": { + "type": "alias", + "path": "aws.cloudtrail.request_parameters.attribute" + }, + "aws-cloudtrail-request_parameters-username": { + "type": "alias", + "path": "aws.cloudtrail.request_parameters.username" + }, + "aws-cloudtrail-request_parameters-container_definitions-command": { + "type": "alias", + "path": "aws.cloudtrail.request_parameters.container_definitions.command" + }, + "aws-cloudtrail-user_identity-session_context-session_issuer-type": { + "type": "alias", + "path": "aws.cloudtrail.user_identity.session_context.session_issuer.type" + }, + "aws-cloudtrail-user_identity-arn": { + "type": "alias", + "path": "aws-cloudtrail-user_identity-arn" + }, + "timestamp": { + "path": "@timestamp", + "type": "alias" } } } \ No newline at end of file diff --git a/src/main/resources/OSMapping/dns/fieldmappings.yml b/src/main/resources/OSMapping/dns/fieldmappings.yml index 9ab79f08f..7672ee77a 100644 --- a/src/main/resources/OSMapping/dns/fieldmappings.yml +++ b/src/main/resources/OSMapping/dns/fieldmappings.yml @@ -1,7 +1,5 @@ -# this file provides pre-defined mappings for Sigma fields defined for all Sigma rules under windows log group to their corresponding ECS Fields. fieldmappings: - EventID: event_uid - HiveName: unmapped.HiveName - fieldB: mappedB - fieldA1: mappedA - CommandLine: windows-event_data-CommandLine + record_type: dns-answers-type + query: dns-question-name + parent_domain: dns-question-registered_domain + creationTime: timestamp \ No newline at end of file diff --git a/src/main/resources/OSMapping/dns/mappings.json b/src/main/resources/OSMapping/dns/mappings.json index ea77f2460..0905e2c9b 100644 --- a/src/main/resources/OSMapping/dns/mappings.json +++ b/src/main/resources/OSMapping/dns/mappings.json @@ -1,12 +1,20 @@ { "properties": { - "windows-event_data-CommandLine": { + "dns-answers-type": { "type": "alias", - "path": "CommandLine" + "path": "dns.answers.type" }, - "event_uid": { + "dns-question-name": { "type": "alias", - "path": "EventID" + "path": "dns.question.name" + }, + "dns-question-registered_domain": { + "type": "alias", + "path": "dns.question.registered_domain" + }, + "timestamp": { + "path": "creationTime", + "type": "alias" } } -} \ No newline at end of file +} diff --git a/src/main/resources/OSMapping/github/fieldmappings.yml b/src/main/resources/OSMapping/github/fieldmappings.yml new file mode 100644 index 000000000..8b0c86406 --- /dev/null +++ b/src/main/resources/OSMapping/github/fieldmappings.yml @@ -0,0 +1,2 @@ +fieldmappings: + action: github-action \ No newline at end of file diff --git a/src/main/resources/OSMapping/github/mappings.json b/src/main/resources/OSMapping/github/mappings.json new file mode 100644 index 000000000..6041dcbcb --- /dev/null +++ b/src/main/resources/OSMapping/github/mappings.json @@ -0,0 +1,8 @@ +{ + "properties": { + "github-action": { + "type": "alias", + "path": "github.action" + } + } +} \ No newline at end of file diff --git a/src/main/resources/OSMapping/gworkspace/fieldmappings.yml b/src/main/resources/OSMapping/gworkspace/fieldmappings.yml new file mode 100644 index 000000000..ef8e00a72 --- /dev/null +++ b/src/main/resources/OSMapping/gworkspace/fieldmappings.yml @@ -0,0 +1,4 @@ +fieldmappings: + eventSource: google_workspace-admin-service-name + eventName: google_workspace-event-name # not sure if beat is missing this one... I see only google_workspace.event.type which IS NOT eventName + new_value: google_workspace-admin-new_value diff --git a/src/main/resources/OSMapping/gworkspace/mappings.json b/src/main/resources/OSMapping/gworkspace/mappings.json new file mode 100644 index 000000000..15ff468f2 --- /dev/null +++ b/src/main/resources/OSMapping/gworkspace/mappings.json @@ -0,0 +1,16 @@ +{ + "properties": { + "google_workspace-admin-service-name": { + "type": "alias", + "path": "google_workspace.admin.service.name" + }, + "google_workspace-event-name": { + "type": "alias", + "path": "google_workspace.event.name" + }, + "google_workspace-admin-new_value": { + "type": "alias", + "path": "google_workspace.admin.new_value" + } + } +} \ No newline at end of file diff --git a/src/main/resources/OSMapping/linux/fieldmappings.yml b/src/main/resources/OSMapping/linux/fieldmappings.yml index ef1c2c0c6..d58617df6 100644 --- a/src/main/resources/OSMapping/linux/fieldmappings.yml +++ b/src/main/resources/OSMapping/linux/fieldmappings.yml @@ -1,7 +1,15 @@ -# this file provides pre-defined mappings for Sigma fields defined for all Sigma rules under linux log group to their corresponding ECS Fields. fieldmappings: - EventID: event_uid - HiveName: unmapped.HiveName - fieldB: mappedB - fieldA1: mappedA - + name: user-filesystem-name + a0: auditd-log-a0 + comm: auditd-log-comm + exe: auditd-log-exe + uid: auditd-log-uid + USER: system-auth-user + User: system-auth-user + Image: process-exe + DestinationHostname: rsa-web-remote_domain + CommandLine: process-command_line + ParentImage: process-parent-executable + CurrentDirectory: process-working_directory + LogonId: process-real_user-id + creationTime: timestamp diff --git a/src/main/resources/OSMapping/linux/mappings.json b/src/main/resources/OSMapping/linux/mappings.json new file mode 100644 index 000000000..5c69c0c48 --- /dev/null +++ b/src/main/resources/OSMapping/linux/mappings.json @@ -0,0 +1,56 @@ +{ + "properties": { + "user-filesystem-name": { + "path": "user.filesystem.name", + "type": "alias" + }, + "auditd-log-a0": { + "path": "auditd.log.a0", + "type": "alias" + }, + "auditd-log-comm": { + "path": "auditd.log.comm", + "type": "alias" + }, + "auditd-log-exe": { + "path": "auditd.log.exe", + "type": "alias" + }, + "auditd-log-uid": { + "path": "auditd.log.uid", + "type": "alias" + }, + "system-auth-user": { + "path": "system.auth.user", + "type": "alias" + }, + "process-exe": { + "path": "process.exe", + "type": "alias" + }, + "rsa-web-remote_domain": { + "path": "rsa.web.remote_domain", + "type": "alias" + }, + "process-command_line": { + "path": "process.command_line", + "type": "alias" + }, + "process-parent-executable": { + "path": "process.parent.executable", + "type": "alias" + }, + "process-working_directory": { + "path": "process.working_directory", + "type": "alias" + }, + "process-real_user-id": { + "path": "process.real_user.id", + "type": "alias" + }, + "timestamp": { + "path": "creationTime", + "type": "alias" + } + } +} \ No newline at end of file diff --git a/src/main/resources/OSMapping/m365/fieldmappings.yml b/src/main/resources/OSMapping/m365/fieldmappings.yml new file mode 100644 index 000000000..6f641a816 --- /dev/null +++ b/src/main/resources/OSMapping/m365/fieldmappings.yml @@ -0,0 +1,5 @@ +fieldmappings: + eventSource: rsa-misc-event_source + eventName: rsa-misc-event_desc + status: rsa-misc-status + Payload: rsa-misc-payload_dst \ No newline at end of file diff --git a/src/main/resources/OSMapping/m365/mappings.json b/src/main/resources/OSMapping/m365/mappings.json new file mode 100644 index 000000000..98c586796 --- /dev/null +++ b/src/main/resources/OSMapping/m365/mappings.json @@ -0,0 +1,20 @@ +{ + "properties": { + "rsa-misc-event_desc": { + "type": "alias", + "path": "rsa.misc.event_desc" + }, + "rsa-misc-event_source": { + "type": "alias", + "path": "rsa.misc.event_source" + }, + "rsa-misc-status": { + "type": "alias", + "path": "rsa.misc.status" + }, + "rsa-misc-payload_dst": { + "type": "alias", + "path": "rsa.misc.payload_dst" + } + } +} \ No newline at end of file diff --git a/src/main/resources/OSMapping/mapper_topics.json b/src/main/resources/OSMapping/mapper_topics.json index 930adba62..bb3b0625e 100644 --- a/src/main/resources/OSMapping/mapper_topics.json +++ b/src/main/resources/OSMapping/mapper_topics.json @@ -1,6 +1,23 @@ { "netflow": "OSMapping/network/NetFlowMapping.json", - "macos": "OSMapping/macos/mappings.json", + "ad_ldap": "OSMapping/ad_ldap/mappings.json", + "apache_access": "OSMapping/apache_access/mappings.json", + "cloudtrail": "OSMapping/cloudtrail/mappings.json", + "dns": "OSMapping/dns/mappings.json", "network": "OSMapping/network/mappings.json", - "windows": "OSMapping/windows/mappings.json" + "others_application": "OSMapping/others_application/mappings.json", + "others_apt": "OSMapping/others_apt/mappings.json", + "others_cloud": "OSMapping/others_cloud/mappings.json", + "others_compliance": "OSMapping/others_compliance/mappings.json", + "others_macos": "OSMapping/others_macos/mappings.json", + "others_proxy": "OSMapping/others_proxy/mappings.json", + "others_web": "OSMapping/others_web/mappings.json", + "s3": "OSMapping/s3/mappings.json", + "okta": "OSMapping/okta/mappings.json", + "m365": "OSMapping/m365/mappings.json", + "gworkspace": "OSMapping/gworkspace/mappings.json", + "github": "OSMapping/github/mappings.json", + "azure": "OSMapping/azure/mappings.json", + "windows": "OSMapping/windows/mappings.json", + "test_windows": "OSMapping/test_windows/mappings.json" } \ No newline at end of file diff --git a/src/main/resources/OSMapping/network/NetFlowMapping.json b/src/main/resources/OSMapping/network/NetFlowMapping.json index 2bc0d4d76..9e404a67e 100644 --- a/src/main/resources/OSMapping/network/NetFlowMapping.json +++ b/src/main/resources/OSMapping/network/NetFlowMapping.json @@ -23,6 +23,10 @@ "http.response.status_code": { "type": "alias", "path": "netflow.http_status_code" + }, + "timestamp": { + "path": "creationTime", + "type": "alias" } } } \ No newline at end of file diff --git a/src/main/resources/OSMapping/network/fieldmappings.yml b/src/main/resources/OSMapping/network/fieldmappings.yml index 768d22ba5..2a10f24f4 100644 --- a/src/main/resources/OSMapping/network/fieldmappings.yml +++ b/src/main/resources/OSMapping/network/fieldmappings.yml @@ -1,7 +1,18 @@ -# this file provides pre-defined mappings for Sigma fields defined for all Sigma rules under network log group to their corresponding ECS Fields. - fieldmappings: - EventID: event_uid - HiveName: unmapped.HiveName - fieldB: mappedB - fieldA1: mappedA + action: netflow-firewall_event + certificate.serial: zeek-x509-certificate-serial + name: zeek-smb_files-name + path: zeek-smb_files-path + dst_port: netflow-tcp_destination_port + netflow-destination_transport_port: netflow-destination_transport_port + qtype_name: zeek-dns-qtype_name + operation: zeek-dce_rpc-operation + endpoint: zeek-dce_rpc-endpoint + zeek-dce_rpc-endpoint: zeek-dce_rpc-endpoint + answers: zeek-dns-answers + query: zeek-dns-query + client_header_names: zeek-http-client_header_names + resp_mime_types: zeek-http-resp_mime_types + cipher: zeek-kerberos-cipher + request_type: zeek-kerberos-request_type + creationTime: timestamp \ No newline at end of file diff --git a/src/main/resources/OSMapping/network/mappings.json b/src/main/resources/OSMapping/network/mappings.json index eb39c49f4..a3743673e 100644 --- a/src/main/resources/OSMapping/network/mappings.json +++ b/src/main/resources/OSMapping/network/mappings.json @@ -1,8 +1,92 @@ { "properties": { - "source_ip": { - "type": "alias", - "path": "src_ip" + "cloud.account.id": { + "path": "account_id", + "type": "alias" + }, + "cloud.region": { + "path": "region", + "type": "alias" + }, + "network.packets": { + "path": "network_packets", + "type": "alias" + }, + "source.packets": { + "path": "source_packets", + "type": "alias" + }, + "source.ip": { + "path": "source_ip", + "type": "alias" + }, + "source.geo.country_iso_code": { + "path": "source_geo_country_iso_code", + "type": "alias" + }, + "zeek-smb_files-name": { + "path": "zeek.smb_files.name", + "type": "alias" + }, + "zeek-x509-certificate-serial": { + "path": "zeek.x509-certificate.serial", + "type": "alias" + }, + "netflow-tcp_destination_port": { + "path": "netflow.tcp_destination_port", + "type": "alias" + }, + "netflow-destination_transport_port": { + "path": "netflow-destination_transport_port", + "type": "alias" + }, + "netflow-firewall_event": { + "path": "netflow.firewall_event", + "type": "alias" + }, + "zeek-smb_files-path": { + "path": "zeek.smb_files.path", + "type": "alias" + }, + "zeek-dns-qtype_name": { + "path": "zeek.dns.qtype_name", + "type": "alias" + }, + "zeek-dce_rpc-endpoint": { + "path": "zeek.dce_rpc.endpoint", + "type": "alias" + }, + "zeek-dce_rpc-operation": { + "path": "zeek.dce_rpc.operation", + "type": "alias" + }, + "zeek-dns-answers": { + "path": "zeek.dns.answers", + "type": "alias" + }, + "zeek-dns-query": { + "path": "zeek.dns.query", + "type": "alias" + }, + "zeek-http-client_header_names": { + "path": "zeek.http.client_header_names", + "type": "alias" + }, + "zeek-http-resp_mime_types": { + "path": "zeek.http.resp_mime_types", + "type": "alias" + }, + "zeek-kerberos-cipher": { + "path": "zeek.kerberos.cipher", + "type": "alias" + }, + "zeek-kerberos-request_type": { + "path": "zeek.kerberos.request_type", + "type": "alias" + }, + "timestamp": { + "path": "creationTime", + "type": "alias" } } -} \ No newline at end of file +} diff --git a/src/main/resources/OSMapping/okta/fieldmappings.yml b/src/main/resources/OSMapping/okta/fieldmappings.yml new file mode 100644 index 000000000..939428d44 --- /dev/null +++ b/src/main/resources/OSMapping/okta/fieldmappings.yml @@ -0,0 +1,3 @@ +fieldmappings: + eventtype: okta-event_type + displaymessage: okta-display_message \ No newline at end of file diff --git a/src/main/resources/OSMapping/okta/mappings.json b/src/main/resources/OSMapping/okta/mappings.json new file mode 100644 index 000000000..7a4f300ba --- /dev/null +++ b/src/main/resources/OSMapping/okta/mappings.json @@ -0,0 +1,12 @@ +{ + "properties": { + "okta-event_type": { + "type": "alias", + "path": "okta.event_type" + }, + "okta-display_message": { + "type": "alias", + "path": "okta.display_message" + } + } +} \ No newline at end of file diff --git a/src/main/resources/OSMapping/others_application/fieldmappings.yml b/src/main/resources/OSMapping/others_application/fieldmappings.yml index e1dba4476..853eb1cba 100644 --- a/src/main/resources/OSMapping/others_application/fieldmappings.yml +++ b/src/main/resources/OSMapping/others_application/fieldmappings.yml @@ -1,7 +1,4 @@ # this file provides pre-defined mappings for Sigma fields defined for all Sigma rules under application log group to their corresponding ECS Fields. fieldmappings: - EventID: event_uid - HiveName: unmapped.HiveName - fieldB: mappedB - fieldA1: mappedA - + Signature: abusech-malware-signature + Filename: file-name \ No newline at end of file diff --git a/src/main/resources/OSMapping/others_application/mappings.json b/src/main/resources/OSMapping/others_application/mappings.json new file mode 100644 index 000000000..33a27986b --- /dev/null +++ b/src/main/resources/OSMapping/others_application/mappings.json @@ -0,0 +1,12 @@ +{ + "properties": { + "abusech-malware-signature": { + "type": "alias", + "path": "abusech.malware.signature" + }, + "file-name": { + "type": "alias", + "path": "file.name" + } + } +} \ No newline at end of file diff --git a/src/main/resources/OSMapping/others_apt/fieldmappings.yml b/src/main/resources/OSMapping/others_apt/fieldmappings.yml index a25dd9693..26234ae87 100644 --- a/src/main/resources/OSMapping/others_apt/fieldmappings.yml +++ b/src/main/resources/OSMapping/others_apt/fieldmappings.yml @@ -1,7 +1,4 @@ # this file provides pre-defined mappings for Sigma fields defined for all Sigma rules under apt log group to their corresponding ECS Fields. fieldmappings: - EventID: event_uid - HiveName: unmapped.HiveName - fieldB: mappedB - fieldA1: mappedA - + Image: process-exe + CommandLine: process-command_line diff --git a/src/main/resources/OSMapping/others_apt/mappings.json b/src/main/resources/OSMapping/others_apt/mappings.json new file mode 100644 index 000000000..2f12a6177 --- /dev/null +++ b/src/main/resources/OSMapping/others_apt/mappings.json @@ -0,0 +1,12 @@ +{ + "properties": { + "process-exe": { + "type": "alias", + "path": "process.exe" + }, + "process-command_line": { + "type": "alias", + "path": "process.command_line" + } + } +} \ No newline at end of file diff --git a/src/main/resources/OSMapping/others_cloud/fieldmappings.yml b/src/main/resources/OSMapping/others_cloud/fieldmappings.yml index 85af60cec..ea0592520 100644 --- a/src/main/resources/OSMapping/others_cloud/fieldmappings.yml +++ b/src/main/resources/OSMapping/others_cloud/fieldmappings.yml @@ -1,7 +1,24 @@ # this file provides pre-defined mappings for Sigma fields defined for all Sigma rules under cloud log group to their corresponding ECS Fields. fieldmappings: - EventID: event_uid - HiveName: unmapped.HiveName - fieldB: mappedB - fieldA1: mappedA + eventSource: winlog-provider_name + status: azure-platformlogs-status + eventService: winlog-event_data-ServiceName + ResultType: azure-platformlogs-result_type + ResultDescription: azure-signinlogs-result_description + Operation: azure-activitylogs-operation_name + Resultdescription: azure-signinlogs-result_description + AuthenticationRequirement: azure-signinlogs-properties-authentication_requirement + Status: azure-platformlogs-status + OperationName: azure-auditlogs-operation_name + ResourceId: azure-resource-id + OperationNameValue: azure-auditlogs-operation_name + TargetResources: azure-auditlogs-properties-target_resources + NetworkLocationDetails: azure-signinlogs-properties-network_location_details + DeviceDetail.deviceId: azure-signinlogs-properties-device_detail-device_id + ResourceDisplayName: azure-signinlogs-properties-resource_display_name + conditionalAccessStatus: azure-signinlogs-properties-conditional_access_status + LoggedByService: azure-auditlogs-properties-logged_by_service + DeviceDetail.isCompliant: azure-signinlogs-properties-device_detail-is_compliant + ActivityDisplayName: azure-auditlogs-properties-activity_display_name + gcp.audit.method_name: gcp-audit-method_name diff --git a/src/main/resources/OSMapping/others_cloud/mappings.json b/src/main/resources/OSMapping/others_cloud/mappings.json new file mode 100644 index 000000000..dfd43cc08 --- /dev/null +++ b/src/main/resources/OSMapping/others_cloud/mappings.json @@ -0,0 +1,76 @@ +{ + "properties": { + "winlog-provider_name": { + "path": "winlog.provider_name", + "type": "alias" + }, + "azure-platformlogs-status": { + "path": "azure.platformlogs.status", + "type": "alias" + }, + "winlog-event_data-ServiceName": { + "path": "winlog.event_data.ServiceName", + "type": "alias" + }, + "azure-platformlogs-result_type": { + "path": "azure.platformlogs.result_type", + "type": "alias" + }, + "azure-signinlogs-result_description": { + "path": "azure.signinlogs.result_description", + "type": "alias" + }, + "azure-activitylogs-operation_name": { + "path": "azure.activitylogs.operation_name", + "type": "alias" + }, + "azure-signinlogs-properties-authentication_requirement": { + "path": "azure.signinlogs.props.authentication_requirement", + "type": "alias" + }, + "azure-auditlogs-operation_name": { + "path": "azure.auditlogs.operation_name", + "type": "alias" + }, + "azure-resource-id": { + "path": "azure.resource.id", + "type": "alias" + }, + "azure-auditlogs-properties-target_resources": { + "path": "azure.auditlogs.props.target_resources", + "type": "alias" + }, + "azure-signinlogs-properties-network_location_details": { + "path": "azure.signinlogs.props.network_location_details", + "type": "alias" + }, + "azure-signinlogs-properties-device_detail-device_id": { + "path": "azure.signinlogs.props.device_detail.device_id", + "type": "alias" + }, + "azure-signinlogs-properties-resource_display_name": { + "path": "azure.signinlogs.props.resource_display_name", + "type": "alias" + }, + "azure-signinlogs-properties-conditional_access_status": { + "path": "azure.signinlogs.props.conditional_access_status", + "type": "alias" + }, + "azure-auditlogs-properties-logged_by_service": { + "path": "azure.auditlogs.props.logged_by_service", + "type": "alias" + }, + "azure-signinlogs-properties-device_detail-is_compliant": { + "path": "azure.signinlogs.props.device_detail.is_compliant", + "type": "alias" + }, + "azure-auditlogs-properties-activity_display_name": { + "path": "azure.auditlogs.props.activity_display_name", + "type": "alias" + }, + "gcp-audit-method_name": { + "path": "gcp.audit.method_name", + "type": "alias" + } + } +} diff --git a/src/main/resources/OSMapping/others_compliance/fieldmappings.yml b/src/main/resources/OSMapping/others_compliance/fieldmappings.yml index ddf9081d6..743cef93d 100644 --- a/src/main/resources/OSMapping/others_compliance/fieldmappings.yml +++ b/src/main/resources/OSMapping/others_compliance/fieldmappings.yml @@ -1,7 +1,7 @@ # this file provides pre-defined mappings for Sigma fields defined for all Sigma rules under compliance log group to their corresponding ECS Fields. fieldmappings: - EventID: event_uid - HiveName: unmapped.HiveName - fieldB: mappedB - fieldA1: mappedA - + host.scan.vuln: vulnerability-id + EventID: winlog-event_id + host.scan.vuln_name: vulnerability-enumeration + destination.port: netflow-tcp_destination_port + creationTime: timestamp diff --git a/src/main/resources/OSMapping/others_compliance/mappings.json b/src/main/resources/OSMapping/others_compliance/mappings.json new file mode 100644 index 000000000..31c3db63a --- /dev/null +++ b/src/main/resources/OSMapping/others_compliance/mappings.json @@ -0,0 +1,24 @@ +{ + "properties": { + "vulnerability-id": { + "path": "vulnerability.id", + "type": "alias" + }, + "winlog-event_id": { + "path": "winlog.event_id", + "type": "alias" + }, + "vulnerability-enumeration": { + "path": "vulnerability.enumeration", + "type": "alias" + }, + "netflow-tcp_destination_port": { + "path": "netflow.tcp_destination_port", + "type": "alias" + }, + "timestamp": { + "path": "creationTime", + "type": "alias" + } + } +} diff --git a/src/main/resources/OSMapping/others_macos/fieldmappings.yml b/src/main/resources/OSMapping/others_macos/fieldmappings.yml index 35a88e285..1754fcddc 100644 --- a/src/main/resources/OSMapping/others_macos/fieldmappings.yml +++ b/src/main/resources/OSMapping/others_macos/fieldmappings.yml @@ -4,3 +4,4 @@ fieldmappings: HiveName: unmapped.HiveName fieldB: mappedB fieldA1: mappedA + creationTime: timestamp \ No newline at end of file diff --git a/src/main/resources/OSMapping/others_macos/mappings.json b/src/main/resources/OSMapping/others_macos/mappings.json index 0804434f1..5da91e3d9 100644 --- a/src/main/resources/OSMapping/others_macos/mappings.json +++ b/src/main/resources/OSMapping/others_macos/mappings.json @@ -3,6 +3,10 @@ "macos.event_data.CommandLine": { "type": "alias", "path": "CommandLine" + }, + "timestamp": { + "path": "creationTime", + "type": "alias" } } } \ No newline at end of file diff --git a/src/main/resources/OSMapping/others_proxy/fieldmappings.yml b/src/main/resources/OSMapping/others_proxy/fieldmappings.yml index ddaaa6956..08b377f19 100644 --- a/src/main/resources/OSMapping/others_proxy/fieldmappings.yml +++ b/src/main/resources/OSMapping/others_proxy/fieldmappings.yml @@ -4,4 +4,4 @@ fieldmappings: HiveName: unmapped.HiveName fieldB: mappedB fieldA1: mappedA - + creationTime: timestamp diff --git a/src/main/resources/OSMapping/others_proxy/mappings.json b/src/main/resources/OSMapping/others_proxy/mappings.json new file mode 100644 index 000000000..a3ccdca77 --- /dev/null +++ b/src/main/resources/OSMapping/others_proxy/mappings.json @@ -0,0 +1,32 @@ +{ + "properties": { + "windows-event_data-CommandLine": { + "type": "alias", + "path": "CommandLine" + }, + "event_uid": { + "type": "alias", + "path": "EventID" + }, + "windows-hostname": { + "type": "alias", + "path": "HostName" + }, + "windows-message": { + "type": "alias", + "path": "Message" + }, + "windows-provider-name": { + "type": "alias", + "path": "Provider_Name" + }, + "windows-servicename": { + "type": "alias", + "path": "ServiceName" + }, + "timestamp": { + "path": "creationTime", + "type": "alias" + } + } +} \ No newline at end of file diff --git a/src/main/resources/OSMapping/others_web/fieldmappings.yml b/src/main/resources/OSMapping/others_web/fieldmappings.yml index a1cc130a2..17eb29dda 100644 --- a/src/main/resources/OSMapping/others_web/fieldmappings.yml +++ b/src/main/resources/OSMapping/others_web/fieldmappings.yml @@ -1,7 +1,9 @@ # this file provides pre-defined mappings for Sigma fields defined for all Sigma rules under web log group to their corresponding ECS Fields. fieldmappings: - EventID: event_uid - HiveName: unmapped.HiveName - fieldB: mappedB - fieldA1: mappedA - + c-uri: rsa-web-p_url + sc-status: rsa-misc-status + c-useragent: rsa-web-p_user_agent + cs-method: rsa-web-p_web_method + cs-uri-query: rsa-web-web_ref_query + cs-username: rsa-misc-username_fld + creationTime: timestamp diff --git a/src/main/resources/OSMapping/others_web/mappings.json b/src/main/resources/OSMapping/others_web/mappings.json new file mode 100644 index 000000000..ea2dd3910 --- /dev/null +++ b/src/main/resources/OSMapping/others_web/mappings.json @@ -0,0 +1,32 @@ +{ + "properties": { + "rsa-web-p_url": { + "path": "rsa.web.p_url", + "type": "alias" + }, + "rsa-misc-status": { + "path": "rsa.misc.status", + "type": "alias" + }, + "rsa-web-p_user_agent": { + "path": "rsa.web.p_user_agent", + "type": "alias" + }, + "rsa-web-p_web_method": { + "path": "rsa.web.p_web_method", + "type": "alias" + }, + "rsa-web-web_ref_query": { + "path": "rsa.web.web_ref_query", + "type": "alias" + }, + "rsa-misc-username_fld": { + "path": "rsa.misc.username_fld", + "type": "alias" + }, + "timestamp": { + "path": "creationTime", + "type": "alias" + } + } +} diff --git a/src/main/resources/OSMapping/s3/fieldmappings.yml b/src/main/resources/OSMapping/s3/fieldmappings.yml index 9ab79f08f..7f56ba15d 100644 --- a/src/main/resources/OSMapping/s3/fieldmappings.yml +++ b/src/main/resources/OSMapping/s3/fieldmappings.yml @@ -1,7 +1,4 @@ -# this file provides pre-defined mappings for Sigma fields defined for all Sigma rules under windows log group to their corresponding ECS Fields. fieldmappings: - EventID: event_uid - HiveName: unmapped.HiveName - fieldB: mappedB - fieldA1: mappedA - CommandLine: windows-event_data-CommandLine + eventName: aws-cloudtrail-event_name + eventSource: aws-cloudtrail-event_source + eventTime: timestamp diff --git a/src/main/resources/OSMapping/s3/mappings.json b/src/main/resources/OSMapping/s3/mappings.json index ea77f2460..f08127f93 100644 --- a/src/main/resources/OSMapping/s3/mappings.json +++ b/src/main/resources/OSMapping/s3/mappings.json @@ -1,12 +1,56 @@ { "properties": { - "windows-event_data-CommandLine": { + "cloud.account.id": { "type": "alias", - "path": "CommandLine" + "path": "aws.s3access.requester" }, - "event_uid": { + "cloud.region": { "type": "alias", - "path": "EventID" + "path": "region_id" + }, + "source.geo.country_iso_code": { + "type": "alias", + "path": "src_country_iso_code" + }, + "source.ip": { + "type": "alias", + "path": "aws.s3access.remote_ip" + }, + "Bucket": { + "type": "alias", + "path": "aws.s3access.bucket" + }, + "ErrorCode": { + "type": "alias", + "path": "aws.s3access.error_code" + }, + "HTTPstatus": { + "type": "alias", + "path": "aws.s3access.http_status" + }, + "Operation": { + "type": "alias", + "path": "aws.s3access.operation" + }, + "RequestURI_key": { + "type": "alias", + "path": "aws.s3access.key" + }, + "Requester": { + "type": "alias", + "path": "aws.s3access.requester" + }, + "aws-cloudtrail-event_source": { + "type": "alias", + "path": "aws.cloudtrail.event_source" + }, + "aws-cloudtrail-event_name": { + "type": "alias", + "path": "aws.cloudtrail.event_name" + }, + "timestamp": { + "path": "@timestamp", + "type": "alias" } } -} \ No newline at end of file +} diff --git a/src/main/resources/OSMapping/test_windows/fieldmappings.yml b/src/main/resources/OSMapping/test_windows/fieldmappings.yml new file mode 100644 index 000000000..04fff5ead --- /dev/null +++ b/src/main/resources/OSMapping/test_windows/fieldmappings.yml @@ -0,0 +1,12 @@ +# this file provides pre-defined mappings for Sigma fields defined for all Sigma rules under windows log group to their corresponding ECS Fields. +fieldmappings: + EventID: event_uid + HiveName: unmapped.HiveName + fieldB: mappedB + fieldA1: mappedA + CommandLine: windows-event_data-CommandLine + HostName: windows-hostname + Message: windows-message + Provider_Name: windows-provider-name + ServiceName: windows-servicename + creationTime: timestamp diff --git a/src/main/resources/OSMapping/test_windows/mappings.json b/src/main/resources/OSMapping/test_windows/mappings.json new file mode 100644 index 000000000..a3ccdca77 --- /dev/null +++ b/src/main/resources/OSMapping/test_windows/mappings.json @@ -0,0 +1,32 @@ +{ + "properties": { + "windows-event_data-CommandLine": { + "type": "alias", + "path": "CommandLine" + }, + "event_uid": { + "type": "alias", + "path": "EventID" + }, + "windows-hostname": { + "type": "alias", + "path": "HostName" + }, + "windows-message": { + "type": "alias", + "path": "Message" + }, + "windows-provider-name": { + "type": "alias", + "path": "Provider_Name" + }, + "windows-servicename": { + "type": "alias", + "path": "ServiceName" + }, + "timestamp": { + "path": "creationTime", + "type": "alias" + } + } +} \ No newline at end of file diff --git a/src/main/resources/OSMapping/windows/fieldmappings.yml b/src/main/resources/OSMapping/windows/fieldmappings.yml index 7567e715b..8e905ad52 100644 --- a/src/main/resources/OSMapping/windows/fieldmappings.yml +++ b/src/main/resources/OSMapping/windows/fieldmappings.yml @@ -1,11 +1,65 @@ # this file provides pre-defined mappings for Sigma fields defined for all Sigma rules under windows log group to their corresponding ECS Fields. fieldmappings: - EventID: event_uid - HiveName: unmapped.HiveName - fieldB: mappedB - fieldA1: mappedA - CommandLine: windows-event_data-CommandLine - HostName: windows-hostname - Message: windows-message - Provider_Name: windows-provider-name - ServiceName: windows-servicename + AccountName: winlog-computerObject-name + AuthenticationPackageName: winlog-event_data-AuthenticationPackageName + Channel: winlog-channel + Company: winlog-event_data-Company + ComputerName: winlog-computer_name + Description: winlog-event_data-Description + Details: winlog-event_data-Detail + Device: winlog-event_data-DeviceName + DeviceName: winlog-event_data-DeviceName + FileName: winlog-event_data-OriginalFileName + FileVersion: winlog-event_data-FileVersion + IntegrityLevel: winlog-event_data-IntegrityLevel + IpAddress: winlog-event_data-IpAddress + KeyLength: winlog-event_data-KeyLength + Keywords: winlog-keywords + LogonId: winlog-event_data-LogonId + LogonProcessName: winlog-event_data-LogonProcessName + LogonType: winlog-event_data-LogonType + OriginalFileName: winlog-event_data-OriginalFileName + OriginalFilename: winlog-event_data-OriginalFileName + Path: winlog-event_data-Path + PrivilegeList: winlog-event_data-PrivilegeList + ProcessId: winlog-event_data-ProcessId + Product: winlog-event_data-Product + Provider: winlog-provider_name + ProviderName: winlog-provider_name + ScriptBlockText: winlog-event_data-ScriptBlockText + ServerName: winlog-event_data-TargetServerName + Service: winlog-event_data-ServiceName + Signed: winlog-event_data-Signed + State: winlog-event_data-State + Status: winlog-event_data-Status + SubjectDomainName: winlog-event_data-SubjectDomainName + SubjectLogonId: winlog-event_data-SubjectLogonId + SubjectUserName: winlog-event_data-SubjectUserName + SubjectUserSid: winlog-event_data-SubjectUserSid + TargetLogonId: winlog-event_data-TargetLogonId + TargetName: winlog-event_data-TargetUserName + TargetServerName: winlog-event_data-TargetServerName + TargetUserName: winlog-event_data-TargetUserName + TargetUserSid: winlog-event_data-TargetUserSid + TaskName: winlog-task + Type: winlog-user-type + User: winlog-user-name + UserName: winlog-user-name + Workstation: winlog-event_data-Workstation + WorkstationName: winlog-event_data-Workstation + event_uid: winlog-event_id + CommandLine: server-user-hash + hostname: host-hostname + message: windows-message + Provider_Name: winlog-provider_name + EventId: winlog-event_id + processPath: winlog-event_data-ProcessPath + ProcessName: winlog-event_data-ProcessName + ObjectName: winlog-computerObject-name + param1: winlog-event_data-param1 + param2: winlog-event_data-param2 + windows-hostname: winlog-computer_name + windows-provider-name: winlog-provider_name + windows-servicename: winlog-event_data-ServiceName + creationTime: timestamp + diff --git a/src/main/resources/OSMapping/windows/mappings.json b/src/main/resources/OSMapping/windows/mappings.json index 48cdda71d..f2eef2a51 100644 --- a/src/main/resources/OSMapping/windows/mappings.json +++ b/src/main/resources/OSMapping/windows/mappings.json @@ -1,28 +1,196 @@ { "properties": { - "windows-event_data-CommandLine": { - "type": "alias", - "path": "CommandLine" + "winlog-computerObject-name": { + "path": "winlog.computerObject.name", + "type": "alias" }, - "event_uid": { - "type": "alias", - "path": "EventID" + "winlog-event_data-AuthenticationPackageName": { + "path": "winlog.event_data.AuthenticationPackageName", + "type": "alias" }, - "windows-hostname": { - "type": "alias", - "path": "HostName" + "winlog-channel": { + "path": "winlog.channel", + "type": "alias" + }, + "winlog-event_data-Company": { + "path": "winlog.event_data.Company", + "type": "alias" + }, + "winlog-computer_name": { + "path": "winlog.computer_name", + "type": "alias" + }, + "winlog-event_data-Description": { + "path": "winlog.event_data.Description", + "type": "alias" + }, + "winlog-event_data-Detail": { + "path": "winlog.event_data.Detail", + "type": "alias" + }, + "winlog-event_data-DeviceName": { + "path": "winlog.event_data.DeviceName", + "type": "alias" + }, + "winlog-event_data-OriginalFileName": { + "path": "winlog.event_data.OriginalFileName", + "type": "alias" + }, + "winlog-event_data-FileVersion": { + "path": "winlog.event_data.FileVersion", + "type": "alias" + }, + "winlog-event_data-IntegrityLevel": { + "path": "winlog.event_data.IntegrityLevel", + "type": "alias" + }, + "winlog-event_data-IpAddress": { + "path": "winlog.event_data.IpAddress", + "type": "alias" + }, + "winlog-event_data-KeyLength": { + "path": "winlog.event_data.KeyLength", + "type": "alias" + }, + "winlog-keywords": { + "path": "winlog.keywords", + "type": "alias" + }, + "winlog-event_data-LogonId": { + "path": "winlog.event_data.LogonId", + "type": "alias" + }, + "winlog-event_data-LogonProcessName": { + "path": "winlog.event_data.LogonProcessName", + "type": "alias" + }, + "winlog-event_data-LogonType": { + "path": "winlog.event_data.LogonType", + "type": "alias" + }, + "winlog-event_data-Path": { + "path": "winlog.event_data.Path", + "type": "alias" + }, + "winlog-event_data-PrivilegeList": { + "path": "winlog.event_data.PrivilegeList", + "type": "alias" + }, + "winlog-event_data-ProcessId": { + "path": "winlog.event_data.ProcessId", + "type": "alias" + }, + "winlog-event_data-Product": { + "path": "winlog.event_data.Product", + "type": "alias" + }, + "winlog-provider_name": { + "path": "winlog.provider_name", + "type": "alias" + }, + "winlog-event_data-ScriptBlockText": { + "path": "winlog.event_data.ScriptBlockText", + "type": "alias" + }, + "winlog-event_data-TargetServerName": { + "path": "winlog.event_data.TargetServerName", + "type": "alias" + }, + "winlog-event_data-ServiceName": { + "path": "winlog.event_data.ServiceName", + "type": "alias" + }, + "winlog-event_data-Signed": { + "path": "winlog.event_data.Signed", + "type": "alias" + }, + "winlog-event_data-State": { + "path": "winlog.event_data.State", + "type": "alias" + }, + "winlog-event_data-Status": { + "path": "winlog.event_data.Status", + "type": "alias" + }, + "winlog-event_data-SubjectDomainName": { + "path": "winlog.event_data.SubjectDomainName", + "type": "alias" + }, + "winlog-event_data-SubjectLogonId": { + "path": "winlog.event_data.SubjectLogonId", + "type": "alias" + }, + "winlog-event_data-SubjectUserName": { + "path": "winlog.event_data.SubjectUserName", + "type": "alias" + }, + "winlog-event_data-SubjectUserSid": { + "path": "winlog.event_data.SubjectUserSid", + "type": "alias" + }, + "winlog-event_data-TargetLogonId": { + "path": "winlog.event_data.TargetLogonId", + "type": "alias" + }, + "winlog-event_data-TargetUserName": { + "path": "winlog.event_data.TargetUserName", + "type": "alias" + }, + "winlog-event_data-TargetUserSid": { + "path": "winlog.event_data.TargetUserSid", + "type": "alias" + }, + "winlog-task": { + "path": "winlog.task", + "type": "alias" + }, + "winlog-user-type": { + "path": "winlog.user.type", + "type": "alias" + }, + "winlog-user-name": { + "path": "winlog.user.name", + "type": "alias" + }, + "winlog-event_data-Workstation": { + "path": "winlog.event_data.Workstation", + "type": "alias" + }, + "winlog-event_id": { + "path": "winlog.event_id", + "type": "alias" + }, + "server-user-hash": { + "path": "server.user.hash", + "type": "alias" + }, + "host-hostname": { + "path": "host.hostname", + "type": "alias" }, "windows-message": { - "type": "alias", - "path": "Message" + "path": "windows.message", + "type": "alias" + }, + "winlog-event_data-ProcessPath": { + "path": "winlog.event_data.ProcessPath", + "type": "alias" + }, + "winlog-event_data-ProcessName": { + "path": "winlog.event_data.ProcessName", + "type": "alias" + }, + "winlog-event_data-param1": { + "path": "winlog.event_data.param1", + "type": "alias" }, - "windows-provider-name": { - "type": "alias", - "path": "Provider_Name" + "winlog-event_data-param2": { + "path": "winlog.event_data.param2", + "type": "alias" }, - "windows-servicename": { - "type": "alias", - "path": "ServiceName" + "timestamp": { + "path": "creationTime", + "type": "alias" } } } \ No newline at end of file diff --git a/src/main/resources/mappings/alert_mapping.json b/src/main/resources/mappings/alert_mapping.json new file mode 100644 index 000000000..fcb1d1c94 --- /dev/null +++ b/src/main/resources/mappings/alert_mapping.json @@ -0,0 +1,157 @@ +{ + "dynamic": "strict", + "_routing": { + "required": true + }, + "_meta" : { + "schema_version": 4 + }, + "properties": { + "schema_version": { + "type": "integer" + }, + "monitor_id": { + "type": "keyword" + }, + "monitor_version": { + "type": "long" + }, + "id": { + "type": "keyword" + }, + "version": { + "type": "long" + }, + "severity": { + "type": "keyword" + }, + "monitor_name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "monitor_user": { + "properties": { + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "backend_roles": { + "type" : "text", + "fields" : { + "keyword" : { + "type" : "keyword" + } + } + }, + "roles": { + "type" : "text", + "fields" : { + "keyword" : { + "type" : "keyword" + } + } + }, + "custom_attribute_names": { + "type" : "text", + "fields" : { + "keyword" : { + "type" : "keyword" + } + } + } + } + }, + "trigger_id": { + "type": "keyword" + }, + "trigger_name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "finding_ids": { + "type" : "text", + "fields" : { + "keyword" : { + "type" : "keyword" + } + } + }, + "related_doc_ids": { + "type" : "text", + "fields" : { + "keyword" : { + "type" : "keyword" + } + } + }, + "state": { + "type": "keyword" + }, + "start_time": { + "type": "date" + }, + "last_notification_time": { + "type": "date" + }, + "acknowledged_time": { + "type": "date" + }, + "end_time": { + "type": "date" + }, + "error_message": { + "type": "text" + }, + "alert_history": { + "type": "nested", + "properties": { + "timestamp": { + "type": "date" + }, + "message": { + "type": "text" + } + } + }, + "action_execution_results": { + "type": "nested", + "properties": { + "action_id": { + "type": "keyword" + }, + "last_execution_time": { + "type": "date" + }, + "throttled_count": { + "type": "integer" + } + } + }, + "agg_alert_content": { + "dynamic": true, + "properties": { + "parent_bucket_path": { + "type": "text" + }, + "bucket_key": { + "type": "text" + } + } + } + } +} \ No newline at end of file diff --git a/src/main/resources/mappings/correlation-rules.json b/src/main/resources/mappings/correlation-rules.json new file mode 100644 index 000000000..877a62ce0 --- /dev/null +++ b/src/main/resources/mappings/correlation-rules.json @@ -0,0 +1,52 @@ +{ + "_meta" : { + "schema_version": 1 + }, + "properties": { + "name": { + "type": "text", + "analyzer" : "whitespace", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "correlate": { + "type": "nested", + "properties": { + "index": { + "type": "text", + "analyzer" : "whitespace", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "query": { + "type": "text", + "analyzer" : "whitespace", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "category": { + "type": "text", + "analyzer" : "whitespace", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + } + } +} diff --git a/src/main/resources/mappings/correlation.json b/src/main/resources/mappings/correlation.json new file mode 100644 index 000000000..8fbe2ea71 --- /dev/null +++ b/src/main/resources/mappings/correlation.json @@ -0,0 +1,51 @@ +{ + "_meta" : { + "schema_version": 1 + }, + "properties": { + "root": { + "type": "boolean" + }, + "counter":{ + "type": "long" + }, + "finding1":{ + "type": "keyword" + }, + "finding2":{ + "type": "keyword" + }, + "corr_vector": { + "type": "sa_vector", + "dimension": 101, + "correlation_ctx": { + "similarityFunction": "EUCLIDEAN", + "parameters": { + "m": 16, + "ef_construction": 128 + } + } + }, + "timestamp":{ + "type": "long" + }, + "logType": { + "type": "keyword" + }, + "recordType": { + "type": "keyword" + }, + "scoreTimestamp": { + "type": "long" + }, + "corrRules": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } +} \ No newline at end of file diff --git a/src/main/resources/mappings/detector-queries.json b/src/main/resources/mappings/detector-queries.json deleted file mode 100644 index 7f0602df7..000000000 --- a/src/main/resources/mappings/detector-queries.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "_meta": { - "schema_version": 1 - }, - "properties": { - "query": { - "type": "percolator_ext" - }, - "monitor_id": { - "type": "text" - }, - "index": { - "type": "text" - } - } -} \ No newline at end of file diff --git a/src/main/resources/mappings/detectors.json b/src/main/resources/mappings/detectors.json index aefa0dbf6..0282ee77b 100644 --- a/src/main/resources/mappings/detectors.json +++ b/src/main/resources/mappings/detectors.json @@ -97,6 +97,9 @@ } } }, + "monitor_id": { + "type": "keyword" + }, "schedule": { "properties": { "period": { diff --git a/src/main/resources/mappings/finding_mapping.json b/src/main/resources/mappings/finding_mapping.json new file mode 100644 index 000000000..421dc202c --- /dev/null +++ b/src/main/resources/mappings/finding_mapping.json @@ -0,0 +1,65 @@ +{ + "dynamic": "strict", + "_meta" : { + "schema_version": 2 + }, + "properties": { + "schema_version": { + "type": "integer" + }, + "related_doc_ids": { + "type" : "text", + "fields" : { + "keyword" : { + "type" : "keyword" + } + } + }, + "monitor_id": { + "type": "keyword" + }, + "monitor_name": { + "type": "keyword" + }, + "id": { + "type": "keyword" + }, + "index": { + "type": "keyword" + }, + "queries" : { + "type": "nested", + "properties": { + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "query": { + "type": "text" + }, + "tags": { + "type": "text", + "fields" : { + "keyword" : { + "type" : "keyword" + } + } + } + } + }, + "timestamp": { + "type": "long" + }, + "correlated_doc_ids": { + "type" : "text", + "analyzer": "whitespace", + "fields" : { + "keyword" : { + "type" : "keyword" + } + } + } + } +} \ No newline at end of file diff --git a/src/main/resources/mappings/rules.json b/src/main/resources/mappings/rules.json index 50651c39a..397331805 100644 --- a/src/main/resources/mappings/rules.json +++ b/src/main/resources/mappings/rules.json @@ -100,6 +100,15 @@ } } }, + "query_field_names": { + "type": "nested", + "properties": { + "value": { + "type": "keyword", + "ignore_above": 256 + } + } + }, "rule": { "type": "text", "fields": { diff --git a/src/main/resources/rules/ad_ldap/azure_aad_secops_signin_failure_bad_password_threshold.yml b/src/main/resources/rules/ad_ldap/azure_aad_secops_signin_failure_bad_password_threshold.yml index a914ca337..c34e90a3e 100644 --- a/src/main/resources/rules/ad_ldap/azure_aad_secops_signin_failure_bad_password_threshold.yml +++ b/src/main/resources/rules/ad_ldap/azure_aad_secops_signin_failure_bad_password_threshold.yml @@ -1,5 +1,5 @@ title: Sign-in Failure Bad Password Threshold -id: dff74231-dbed-42ab-ba49-83289be2ac3a +id: dff74231-dbed-42ab-ba49-84289be2ac3a description: Define a baseline threshold and then monitor and adjust to suit your organizational behaviors and limit false alerts from being generated. author: Corissa Koopmans, '@corissalea' date: 2022/04/21 diff --git a/src/main/resources/rules/ad_ldap/azure_aadhybridhealth_adfs_new_server.yml b/src/main/resources/rules/ad_ldap/azure_aadhybridhealth_adfs_new_server.yml index 8088ce3d7..9e8562774 100644 --- a/src/main/resources/rules/ad_ldap/azure_aadhybridhealth_adfs_new_server.yml +++ b/src/main/resources/rules/ad_ldap/azure_aadhybridhealth_adfs_new_server.yml @@ -1,5 +1,5 @@ title: Azure Active Directory Hybrid Health AD FS New Server -id: 288a39fc-4914-4831-9ada-270e9dc12cb4 +id: 287a39fc-4914-4831-9ada-270e9dc12cb4 description: | This detection uses azureactivity logs (Administrative category) to identify the creation or update of a server instance in an Azure AD Hybrid health AD FS service. A threat actor can create a new AD Health ADFS service and create a fake server instance to spoof AD FS signing logs. There is no need to compromise an on-prem AD FS server. diff --git a/src/main/resources/rules/ad_ldap/azure_aadhybridhealth_adfs_service_delete.yml b/src/main/resources/rules/ad_ldap/azure_aadhybridhealth_adfs_service_delete.yml index 6fc97a25f..c325d3d38 100644 --- a/src/main/resources/rules/ad_ldap/azure_aadhybridhealth_adfs_service_delete.yml +++ b/src/main/resources/rules/ad_ldap/azure_aadhybridhealth_adfs_service_delete.yml @@ -1,5 +1,5 @@ title: Azure Active Directory Hybrid Health AD FS Service Delete -id: 48739819-8230-4ee3-a8ea-e0289d1fb0ff +id: 48739819-8230-4de3-a8ea-e0289d1fb0ff description: | This detection uses azureactivity logs (Administrative category) to identify the deletion of an Azure AD Hybrid health AD FS service instance in a tenant. A threat actor can create a new AD Health ADFS service and create a fake server to spoof AD FS signing logs. diff --git a/src/main/resources/rules/ad_ldap/azure_ad_bitlocker_key_retrieval.yml b/src/main/resources/rules/ad_ldap/azure_ad_bitlocker_key_retrieval.yml index e203e67b7..d999c4a67 100644 --- a/src/main/resources/rules/ad_ldap/azure_ad_bitlocker_key_retrieval.yml +++ b/src/main/resources/rules/ad_ldap/azure_ad_bitlocker_key_retrieval.yml @@ -1,5 +1,5 @@ title: Bitlocker Key Retrieval -id: a0413867-daf3-43dd-9245-734b3a787942 +id: a0413867-daf3-43dd-9255-734b3a787942 description: Monitor and alert for Bitlocker key retrieval. author: Michael Epping, '@mepples21' date: 2022/06/28 diff --git a/src/main/resources/rules/ad_ldap/azure_ad_device_registration_or_join_without_mfa.yml b/src/main/resources/rules/ad_ldap/azure_ad_device_registration_or_join_without_mfa.yml index 23c3582cb..65917ece4 100644 --- a/src/main/resources/rules/ad_ldap/azure_ad_device_registration_or_join_without_mfa.yml +++ b/src/main/resources/rules/ad_ldap/azure_ad_device_registration_or_join_without_mfa.yml @@ -1,5 +1,5 @@ title: Device Registration or Join Without MFA -id: 5afa454e-030c-4ab4-9253-a90aa7fcc581 +id: 5afa454e-030c-4ab4-9253-a90aa7fac581 description: Monitor and alert for device registration or join events where MFA was not performed. author: Michael Epping, '@mepples21' date: 2022/06/28 diff --git a/src/main/resources/rules/ad_ldap/azure_ad_device_registration_policy_changes.yml b/src/main/resources/rules/ad_ldap/azure_ad_device_registration_policy_changes.yml index e4c8d8555..08da8a3af 100644 --- a/src/main/resources/rules/ad_ldap/azure_ad_device_registration_policy_changes.yml +++ b/src/main/resources/rules/ad_ldap/azure_ad_device_registration_policy_changes.yml @@ -1,5 +1,5 @@ title: Changes to Device Registration Policy -id: 9494bff8-959f-4440-bbce-fb87a208d517 +id: 9494bff8-959f-4440-abce-fb87a208d517 description: Monitor and alert for changes to the device registration policy. author: Michael Epping, '@mepples21' date: 2022/06/28 diff --git a/src/main/resources/rules/ad_ldap/azure_ad_sign_ins_from_noncompliant_devices.yml b/src/main/resources/rules/ad_ldap/azure_ad_sign_ins_from_noncompliant_devices.yml index 45003d427..e5a0e7198 100644 --- a/src/main/resources/rules/ad_ldap/azure_ad_sign_ins_from_noncompliant_devices.yml +++ b/src/main/resources/rules/ad_ldap/azure_ad_sign_ins_from_noncompliant_devices.yml @@ -1,5 +1,5 @@ title: Sign-ins from Non-Compliant Devices -id: 4f77e1d7-3982-4ee0-8489-abf2d6b75284 +id: 4f77e1d7-3972-4ee0-8489-abf2d6b75284 description: Monitor and alert for sign-ins where the device was non-compliant. author: Michael Epping, '@mepples21' date: 2022/06/28 diff --git a/src/main/resources/rules/ad_ldap/azure_ad_sign_ins_from_unknown_devices.yml b/src/main/resources/rules/ad_ldap/azure_ad_sign_ins_from_unknown_devices.yml index 59e6ad2f1..db67bb1ea 100644 --- a/src/main/resources/rules/ad_ldap/azure_ad_sign_ins_from_unknown_devices.yml +++ b/src/main/resources/rules/ad_ldap/azure_ad_sign_ins_from_unknown_devices.yml @@ -1,5 +1,5 @@ title: Sign-ins by Unknown Devices -id: 4d136857-6a1a-432a-82fc-5dd497ee5e7c +id: 4d136857-6a1a-432a-82ec-5dd497ee5e7c description: Monitor and alert for Sign-ins by unknown devices from non-Trusted locations. author: Michael Epping, '@mepples21' date: 2022/06/28 diff --git a/src/main/resources/rules/ad_ldap/azure_ad_user_added_to_admin_role.yml b/src/main/resources/rules/ad_ldap/azure_ad_user_added_to_admin_role.yml index c85eeffd0..39896aa3e 100644 --- a/src/main/resources/rules/ad_ldap/azure_ad_user_added_to_admin_role.yml +++ b/src/main/resources/rules/ad_ldap/azure_ad_user_added_to_admin_role.yml @@ -1,5 +1,5 @@ title: User Added to an Administrator's Azure AD Role -id: ebbeb024-5b1d-4e16-9c0c-917f86c708a7 +id: ebbeb024-5b1d-4e16-9c1c-917f86c708a7 description: User Added to an Administrator's Azure AD Role author: Raphaël CALVET, @MetallicHack date: 2021/10/04 diff --git a/src/main/resources/rules/ad_ldap/azure_ad_users_added_to_device_admin_roles.yml b/src/main/resources/rules/ad_ldap/azure_ad_users_added_to_device_admin_roles.yml index 0c3140549..0a211a1c8 100644 --- a/src/main/resources/rules/ad_ldap/azure_ad_users_added_to_device_admin_roles.yml +++ b/src/main/resources/rules/ad_ldap/azure_ad_users_added_to_device_admin_roles.yml @@ -1,5 +1,5 @@ title: Users Added to Global or Device Admin Roles -id: 11c767ae-500b-423b-bae3-b234450736ed +id: 11c767ae-500b-423b-bae3-b244450736ed description: Monitor and alert for users added to device admin roles. author: Michael Epping, '@mepples21' date: 2022/06/28 diff --git a/src/main/resources/rules/ad_ldap/win_ldap_recon.yml b/src/main/resources/rules/ad_ldap/win_ldap_recon.yml index e0a9559dd..d5ccf9620 100644 --- a/src/main/resources/rules/ad_ldap/win_ldap_recon.yml +++ b/src/main/resources/rules/ad_ldap/win_ldap_recon.yml @@ -1,5 +1,5 @@ title: LDAP Reconnaissance / Active Directory Enumeration -id: 31d68132-4038-47c7-8f8e-635a39a7c174 +id: 31d68132-4038-47c7-8f8d-635a39a7c174 status: experimental description: Detects possible Active Directory enumeration via LDAP author: Adeem Mawani diff --git a/src/main/resources/rules/azure/azure_aad_secops_signin_failure_bad_password_threshold.yml b/src/main/resources/rules/azure/azure_aad_secops_signin_failure_bad_password_threshold.yml new file mode 100644 index 000000000..a914ca337 --- /dev/null +++ b/src/main/resources/rules/azure/azure_aad_secops_signin_failure_bad_password_threshold.yml @@ -0,0 +1,27 @@ +title: Sign-in Failure Bad Password Threshold +id: dff74231-dbed-42ab-ba49-83289be2ac3a +description: Define a baseline threshold and then monitor and adjust to suit your organizational behaviors and limit false alerts from being generated. +author: Corissa Koopmans, '@corissalea' +date: 2022/04/21 +references: + - https://docs.microsoft.com/en-us/azure/active-directory/fundamentals/security-operations-privileged-accounts#things-to-monitor +logsource: + product: azure + service: signinlogs +detection: + selection: + ResultType: 50126 + ResultDescription: Invalid username or password or Invalid on-premises username or password. + filter_computer: + TargetUserName|endswith: '$' + condition: selection and not filter_computer +falsepositives: + - Failed Azure AD Connect Synchronization + - Service account use with an incorrect password specified + - Misconfigured systems + - Vulnerability scanners +level: high +status: experimental +tags: + - attack.credential_access + - attack.t1110 diff --git a/src/main/resources/rules/azure/azure_aadhybridhealth_adfs_new_server.yml b/src/main/resources/rules/azure/azure_aadhybridhealth_adfs_new_server.yml new file mode 100644 index 000000000..8088ce3d7 --- /dev/null +++ b/src/main/resources/rules/azure/azure_aadhybridhealth_adfs_new_server.yml @@ -0,0 +1,27 @@ +title: Azure Active Directory Hybrid Health AD FS New Server +id: 288a39fc-4914-4831-9ada-270e9dc12cb4 +description: | + This detection uses azureactivity logs (Administrative category) to identify the creation or update of a server instance in an Azure AD Hybrid health AD FS service. + A threat actor can create a new AD Health ADFS service and create a fake server instance to spoof AD FS signing logs. There is no need to compromise an on-prem AD FS server. + This can be done programmatically via HTTP requests to Azure. +status: experimental +date: 2021/08/26 +author: Roberto Rodriguez (Cyb3rWard0g), OTR (Open Threat Research), MSTIC +tags: + - attack.defense_evasion + - attack.t1578 +references: + - https://o365blog.com/post/hybridhealthagent/ +logsource: + product: azure + service: azureactivity +detection: + selection: + CategoryValue: 'Administrative' + ResourceProviderValue: 'Microsoft.ADHybridHealthService' + ResourceId|contains: 'AdFederationService' + OperationNameValue: 'Microsoft.ADHybridHealthService/services/servicemembers/action' + condition: selection +falsepositives: + - Legitimate AD FS servers added to an AAD Health AD FS service instance +level: medium \ No newline at end of file diff --git a/src/main/resources/rules/azure/azure_aadhybridhealth_adfs_service_delete.yml b/src/main/resources/rules/azure/azure_aadhybridhealth_adfs_service_delete.yml new file mode 100644 index 000000000..6fc97a25f --- /dev/null +++ b/src/main/resources/rules/azure/azure_aadhybridhealth_adfs_service_delete.yml @@ -0,0 +1,27 @@ +title: Azure Active Directory Hybrid Health AD FS Service Delete +id: 48739819-8230-4ee3-a8ea-e0289d1fb0ff +description: | + This detection uses azureactivity logs (Administrative category) to identify the deletion of an Azure AD Hybrid health AD FS service instance in a tenant. + A threat actor can create a new AD Health ADFS service and create a fake server to spoof AD FS signing logs. + The health AD FS service can then be deleted after it is not longer needed via HTTP requests to Azure. +status: experimental +date: 2021/08/26 +author: Roberto Rodriguez (Cyb3rWard0g), OTR (Open Threat Research), MSTIC +tags: + - attack.defense_evasion + - attack.t1578.003 +references: + - https://o365blog.com/post/hybridhealthagent/ +logsource: + product: azure + service: azureactivity +detection: + selection: + CategoryValue: 'Administrative' + ResourceProviderValue: 'Microsoft.ADHybridHealthService' + ResourceId|contains: 'AdFederationService' + OperationNameValue: 'Microsoft.ADHybridHealthService/services/delete' + condition: selection +falsepositives: + - Legitimate AAD Health AD FS service instances being deleted in a tenant +level: medium \ No newline at end of file diff --git a/src/main/resources/rules/azure/azure_account_lockout.yml b/src/main/resources/rules/azure/azure_account_lockout.yml new file mode 100644 index 000000000..4a13747c1 --- /dev/null +++ b/src/main/resources/rules/azure/azure_account_lockout.yml @@ -0,0 +1,21 @@ +title: Account Lockout +id: 2b7d6fc0-71ac-4cf7-8ed1-b5788ee5257a +status: experimental +author: AlertIQ +date: 2021/10/10 +description: Identifies user account which has been locked because the user tried to sign in too many times with an incorrect user ID or password. +references: + - https://docs.microsoft.com/en-us/azure/active-directory/fundamentals/security-operations-privileged-accounts +logsource: + product: azure + service: signinlogs +detection: + selection: + ResultType: 50053 + condition: selection +level: medium +falsepositives: + - Unknown +tags: + - attack.credential_access + - attack.t1110 diff --git a/src/main/resources/rules/azure/azure_app_appid_uri_changes.yml b/src/main/resources/rules/azure/azure_app_appid_uri_changes.yml new file mode 100644 index 000000000..1a765db8c --- /dev/null +++ b/src/main/resources/rules/azure/azure_app_appid_uri_changes.yml @@ -0,0 +1,24 @@ +title: Application AppID Uri Configuration Changes +id: 1b45b0d1-773f-4f23-aedc-814b759563b1 +description: Detects when a configuration change is made to an applications AppID URI. +author: Mark Morowczynski '@markmorow', Bailey Bercik '@baileybercik' +date: 2022/06/02 +references: + - https://docs.microsoft.com/en-us/azure/active-directory/fundamentals/security-operations-applications#appid-uri-added-modified-or-removed +logsource: + product: azure + service: auditlogs +detection: + selection: + properties.message: + - Update Application + - Update Service principal + condition: selection +falsepositives: + - When and administrator is making legitmate AppID URI configuration changes to an application. This should be a planned event. +level: high +status: experimental +tags: + - attack.t1528 + - attack.persistence + - attack.credential_access diff --git a/src/main/resources/rules/azure/azure_app_credential_added.yml b/src/main/resources/rules/azure/azure_app_credential_added.yml new file mode 100644 index 000000000..0942ad419 --- /dev/null +++ b/src/main/resources/rules/azure/azure_app_credential_added.yml @@ -0,0 +1,23 @@ +title: Added Credentials to Existing Application +id: cbb67ecc-fb70-4467-9350-c910bdf7c628 +description: Detects when a new credential is added to an existing applcation. Any additional credentials added outside of expected processes could be a malicious actor using those credentials. +author: Mark Morowczynski '@markmorow', Bailey Bercik '@baileybercik' +date: 2022/05/26 +references: + - https://docs.microsoft.com/en-us/azure/active-directory/fundamentals/security-operations-applications#application-credentials +logsource: + product: azure + service: auditlogs +detection: + selection: + properties.message: + - Update Application-Certificates and secrets management + - Update Service principal/Update Application + condition: selection +falsepositives: + - When credentials are added/removed as part of the normal working hours/workflows +level: high +status: experimental +tags: + - attack.t1098 + - attack.persistence diff --git a/src/main/resources/rules/azure/azure_app_credential_modification.yml b/src/main/resources/rules/azure/azure_app_credential_modification.yml new file mode 100644 index 000000000..bca556a2e --- /dev/null +++ b/src/main/resources/rules/azure/azure_app_credential_modification.yml @@ -0,0 +1,22 @@ +title: Azure Application Credential Modified +id: cdeef967-f9a1-4375-90ee-6978c5f23974 +description: Identifies when a application credential is modified. +author: Austin Songer @austinsonger +status: experimental +date: 2021/09/02 +references: + - https://www.cloud-architekt.net/auditing-of-msi-and-service-principals/ +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: 'Update application - Certificates and secrets management' + condition: selection +level: medium +tags: + - attack.impact +falsepositives: + - Application credential added may be performed by a system administrator. + - Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Application credential added from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_app_device_code_authentication.yml b/src/main/resources/rules/azure/azure_app_device_code_authentication.yml new file mode 100644 index 000000000..0854d4a5b --- /dev/null +++ b/src/main/resources/rules/azure/azure_app_device_code_authentication.yml @@ -0,0 +1,27 @@ +title: Application Using Device Code Authentication Flow +id: 248649b7-d64f-46f0-9fb2-a52774166fb5 +status: experimental +description: | + Device code flow is an OAuth 2.0 protocol flow specifically for input constrained devices and is not used in all environments. + If this type of flow is seen in the environment and not being used in an input constrained device scenario, further investigation is warranted. + This can be a misconfigured application or potentially something malicious. +author: Mark Morowczynski '@markmorow', Bailey Bercik '@baileybercik' +date: 2022/06/01 +references: + - https://docs.microsoft.com/en-us/azure/active-directory/fundamentals/security-operations-applications#application-authentication-flows +logsource: + product: azure + service: signinlogs +detection: + selection: + properties.message: Device Code + condition: selection +falsepositives: + - Applications that are input constrained will need to use device code flow and are valid authentications. +level: medium +tags: + - attack.t1078 + - attack.defense_evasion + - attack.persistence + - attack.privilege_escalation + - attack.initial_access diff --git a/src/main/resources/rules/azure/azure_app_owner_added.yml b/src/main/resources/rules/azure/azure_app_owner_added.yml new file mode 100644 index 000000000..de29eb8ed --- /dev/null +++ b/src/main/resources/rules/azure/azure_app_owner_added.yml @@ -0,0 +1,23 @@ +title: Added Owner To Application +id: 74298991-9fc4-460e-a92e-511aa60baec1 +description: Detects when a new owner is added to an application. This gives that account privileges to make modifications and configuration changes to the application. +author: Mark Morowczynski '@markmorow', Bailey Bercik '@baileybercik' +date: 2022/06/02 +references: + - https://docs.microsoft.com/en-us/azure/active-directory/fundamentals/security-operations-applications#new-owner +logsource: + product: azure + service: auditlogs +detection: + selection: + properties.message: Add owner to application + condition: selection +falsepositives: + - When a new application owner is added by an administrator +level: medium +status: experimental +tags: + - attack.t1528 + - attack.persistence + - attack.credential_access + - attack.defense_evasion diff --git a/src/main/resources/rules/azure/azure_app_ropc_authentication.yml b/src/main/resources/rules/azure/azure_app_ropc_authentication.yml new file mode 100644 index 000000000..f2cec560f --- /dev/null +++ b/src/main/resources/rules/azure/azure_app_ropc_authentication.yml @@ -0,0 +1,24 @@ +title: Applications That Are Using ROPC Authentication Flow +id: 55695bc0-c8cf-461f-a379-2535f563c854 +description: Resource owner password credentials (ROPC) should be avoided if at all possible as this requires the user to expose their current password credentials to the application directly. The application then uses those credentials to authenticate the user against the identity provider. +author: Mark Morowczynski '@markmorow', Bailey Bercik '@baileybercik' +date: 2022/06/01 +references: + - https://docs.microsoft.com/en-us/azure/active-directory/fundamentals/security-operations-applications#application-authentication-flows +logsource: + product: azure + service: signinlogs +detection: + selection: + properties.message: ROPC + condition: selection +falsepositives: + - Applications that are being used as part of automated testing or a legacy application that cannot use any other modern authentication flow +level: medium +status: experimental +tags: + - attack.t1078 + - attack.defense_evasion + - attack.persistence + - attack.privilege_escalation + - attack.initial_access diff --git a/src/main/resources/rules/azure/azure_app_uri_modifications.yml b/src/main/resources/rules/azure/azure_app_uri_modifications.yml new file mode 100644 index 000000000..d5724b91d --- /dev/null +++ b/src/main/resources/rules/azure/azure_app_uri_modifications.yml @@ -0,0 +1,24 @@ +title: Application URI Configuration Changes +id: 0055ad1f-be85-4798-83cf-a6da17c993b3 +description: Detects when a configuration change is made to an applications URI. + URIs for domain names that no longer exist (dangling URIs), not using HTTPS, wildcards at the end of the domain, URIs that are no unique to that app, + or URIs that point to domains you do not control should be investigated. +author: Mark Morowczynski '@markmorow', Bailey Bercik '@baileybercik' +date: 2022/06/02 +references: + - https://docs.microsoft.com/en-us/azure/active-directory/fundamentals/security-operations-applications#application-configuration-changes +logsource: + product: azure + service: auditlogs +detection: + selection: + properties.message: Update Application Sucess- Property Name AppAddress + condition: selection +falsepositives: + - When and administrator is making legitmate URI configuration changes to an application. This should be a planned event. +level: high +status: experimental +tags: + - attack.t1528 + - attack.persistence + - attack.credential_access diff --git a/src/main/resources/rules/azure/azure_application_deleted.yml b/src/main/resources/rules/azure/azure_application_deleted.yml new file mode 100644 index 000000000..6d3ee5b0d --- /dev/null +++ b/src/main/resources/rules/azure/azure_application_deleted.yml @@ -0,0 +1,24 @@ +title: Azure Application Deleted +id: 410d2a41-1e6d-452f-85e5-abdd8257a823 +description: Identifies when a application is deleted in Azure. +author: Austin Songer @austinsonger +status: experimental +date: 2021/09/03 +references: + - https://docs.microsoft.com/en-us/azure/active-directory/reports-monitoring/reference-audit-activities#application-proxy +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - Delete application + - Hard Delete application + condition: selection +level: medium +tags: + - attack.defense_evasion +falsepositives: + - Application being deleted may be performed by a system administrator. + - Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Application deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_application_gateway_modified_or_deleted.yml b/src/main/resources/rules/azure/azure_application_gateway_modified_or_deleted.yml new file mode 100644 index 000000000..dab3bf97c --- /dev/null +++ b/src/main/resources/rules/azure/azure_application_gateway_modified_or_deleted.yml @@ -0,0 +1,24 @@ +title: Azure Application Gateway Modified or Deleted +id: ad87d14e-7599-4633-ba81-aeb60cfe8cd6 +description: Identifies when a application gateway is modified or deleted. +author: Austin Songer +status: experimental +date: 2021/08/16 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.NETWORK/APPLICATIONGATEWAYS/WRITE + - MICROSOFT.NETWORK/APPLICATIONGATEWAYS/DELETE + condition: selection +level: medium +tags: + - attack.impact +falsepositives: + - Application gateway being modified or deleted may be performed by a system administrator. + - Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Application gateway modified or deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_application_security_group_modified_or_deleted.yml b/src/main/resources/rules/azure/azure_application_security_group_modified_or_deleted.yml new file mode 100644 index 000000000..a770842d0 --- /dev/null +++ b/src/main/resources/rules/azure/azure_application_security_group_modified_or_deleted.yml @@ -0,0 +1,24 @@ +title: Azure Application Security Group Modified or Deleted +id: 835747f1-9329-40b5-9cc3-97d465754ce6 +description: Identifies when a application security group is modified or deleted. +author: Austin Songer +status: experimental +date: 2021/08/16 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.NETWORK/APPLICATIONSECURITYGROUPS/WRITE + - MICROSOFT.NETWORK/APPLICATIONSECURITYGROUPS/DELETE + condition: selection +level: medium +tags: + - attack.impact +falsepositives: + - Application security group being modified or deleted may be performed by a system administrator. + - Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Application security group modified or deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_blocked_account_attempt.yml b/src/main/resources/rules/azure/azure_blocked_account_attempt.yml new file mode 100644 index 000000000..cf0984b80 --- /dev/null +++ b/src/main/resources/rules/azure/azure_blocked_account_attempt.yml @@ -0,0 +1,23 @@ +title: Account Disabled or Blocked for Sign in Attempts +id: 4afac85c-224a-4dd7-b1af-8da40e1c60bd +description: Detects when an account is disabled or blocked for sign in but tried to log in +author: Yochana Henderson, '@Yochana-H' +date: 2022/06/17 +references: + - https://docs.microsoft.com/en-gb/azure/active-directory/fundamentals/security-operations-privileged-accounts +logsource: + product: azure + service: signinlogs +detection: + selection: + ResultType: 50057 + ResultDescription: Failure + condition: selection +level: medium +falsepositives: + - Account disabled or blocked in error + - Automation account has been blocked or disabled +status: experimental +tags: + - attack.credential_access + - attack.t1110 diff --git a/src/main/resources/rules/azure/azure_change_to_authentication_method.yml b/src/main/resources/rules/azure/azure_change_to_authentication_method.yml new file mode 100644 index 000000000..812357a27 --- /dev/null +++ b/src/main/resources/rules/azure/azure_change_to_authentication_method.yml @@ -0,0 +1,22 @@ +title: Change to Authentication Method +id: 4d78a000-ab52-4564-88a5-7ab5242b20c7 +status: experimental +author: AlertIQ +date: 2021/10/10 +description: Change to authentication method could be an indicated of an attacker adding an auth method to the account so they can have continued access. +references: + - https://docs.microsoft.com/en-us/azure/active-directory/fundamentals/security-operations-privileged-accounts +logsource: + product: azure + service: auditlogs +detection: + selection: + LoggedByService: 'Authentication Methods' + Category: 'UserManagement' + OperationName: 'User registered security info' + condition: selection +level: medium +falsepositives: + - Unknown +tags: + - attack.credential_access diff --git a/src/main/resources/rules/azure/azure_conditional_access_failure.yml b/src/main/resources/rules/azure/azure_conditional_access_failure.yml new file mode 100644 index 000000000..d0af28e9b --- /dev/null +++ b/src/main/resources/rules/azure/azure_conditional_access_failure.yml @@ -0,0 +1,24 @@ +title: Sign-in Failure Due to Conditional Access Requirements Not Met +id: b4a6d707-9430-4f5f-af68-0337f52d5c42 +description: Define a baseline threshold for failed sign-ins due to Conditional Access failures +author: Yochana Henderson, '@Yochana-H' +date: 2022/06/01 +references: + - https://docs.microsoft.com/en-gb/azure/active-directory/fundamentals/security-operations-privileged-accounts +logsource: + product: azure + service: signinlogs +detection: + selection: + ResultType: 53003 + Resultdescription: Blocked by Conditional Access + condition: selection +falsepositives: + - Service Account misconfigured + - Misconfigured Systems + - Vulnerability Scanners +level: high +status: experimental +tags: + - attack.credential_access + - attack.t1110 diff --git a/src/main/resources/rules/azure/azure_container_registry_created_or_deleted.yml b/src/main/resources/rules/azure/azure_container_registry_created_or_deleted.yml new file mode 100644 index 000000000..b394ce894 --- /dev/null +++ b/src/main/resources/rules/azure/azure_container_registry_created_or_deleted.yml @@ -0,0 +1,27 @@ +title: Azure Container Registry Created or Deleted +id: 93e0ef48-37c8-49ed-a02c-038aab23628e +description: Detects when a Container Registry is created or deleted. +author: Austin Songer @austinsonger +status: experimental +date: 2021/08/07 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations#microsoftkubernetes + - https://www.microsoft.com/security/blog/2021/03/23/secure-containerized-environments-with-updated-threat-matrix-for-kubernetes/ + - https://www.microsoft.com/security/blog/2020/04/02/attack-matrix-kubernetes/ + - https://medium.com/mitre-engenuity/att-ck-for-containers-now-available-4c2359654bf1 + - https://attack.mitre.org/matrices/enterprise/cloud/ +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.CONTAINERREGISTRY/REGISTRIES/WRITE + - MICROSOFT.CONTAINERREGISTRY/REGISTRIES/DELETE + condition: selection +level: low +tags: + - attack.impact +falsepositives: + - Container Registry being created or deleted may be performed by a system administrator. Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Container Registry created or deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_creating_number_of_resources_detection.yml b/src/main/resources/rules/azure/azure_creating_number_of_resources_detection.yml new file mode 100644 index 000000000..04c3ed96e --- /dev/null +++ b/src/main/resources/rules/azure/azure_creating_number_of_resources_detection.yml @@ -0,0 +1,22 @@ +title: Number Of Resource Creation Or Deployment Activities +id: d2d901db-7a75-45a1-bc39-0cbf00812192 +status: test +description: Number of VM creations or deployment activities occur in Azure via the azureactivity log. +author: sawwinnnaung +references: + - https://github.com/Azure/Azure-Sentinel/blob/master/Detections/azureactivity/Creating_Anomalous_Number_Of_Resources_detection.yaml +date: 2020/05/07 +modified: 2021/11/27 +logsource: + product: azure + service: azureactivity +detection: + keywords: + - Microsoft.Compute/virtualMachines/write + - Microsoft.Resources/deployments/write + condition: keywords +falsepositives: + - Valid change +level: medium +tags: + - attack.t1098 diff --git a/src/main/resources/rules/azure/azure_device_no_longer_managed_or_compliant.yml b/src/main/resources/rules/azure/azure_device_no_longer_managed_or_compliant.yml new file mode 100644 index 000000000..0c33bda86 --- /dev/null +++ b/src/main/resources/rules/azure/azure_device_no_longer_managed_or_compliant.yml @@ -0,0 +1,22 @@ +title: Azure Device No Longer Managed or Compliant +id: 542b9912-c01f-4e3f-89a8-014c48cdca7d +description: Identifies when a device in azure is no longer managed or compliant +author: Austin Songer @austinsonger +status: experimental +date: 2021/09/03 +references: + - https://docs.microsoft.com/en-us/azure/active-directory/reports-monitoring/reference-audit-activities#core-directory +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - Device no longer compliant + - Device no longer managed + condition: selection +level: medium +tags: + - attack.impact +falsepositives: + - Administrator may have forgotten to review the device. diff --git a/src/main/resources/rules/azure/azure_device_or_configuration_modified_or_deleted.yml b/src/main/resources/rules/azure/azure_device_or_configuration_modified_or_deleted.yml new file mode 100644 index 000000000..cc5aa33d2 --- /dev/null +++ b/src/main/resources/rules/azure/azure_device_or_configuration_modified_or_deleted.yml @@ -0,0 +1,26 @@ +title: Azure Device or Configuration Modified or Deleted +id: 46530378-f9db-4af9-a9e5-889c177d3881 +description: Identifies when a device or device configuration in azure is modified or deleted. +author: Austin Songer @austinsonger +status: experimental +date: 2021/09/03 +references: + - https://docs.microsoft.com/en-us/azure/active-directory/reports-monitoring/reference-audit-activities#core-directory +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - Delete device + - Delete device configuration + - Update device + - Update device configuration + condition: selection +level: medium +tags: + - attack.impact +falsepositives: + - Device or device configuration being modified or deleted may be performed by a system administrator. + - Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Device or device configuration modified or deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_dns_zone_modified_or_deleted.yml b/src/main/resources/rules/azure/azure_dns_zone_modified_or_deleted.yml new file mode 100644 index 000000000..80d55642b --- /dev/null +++ b/src/main/resources/rules/azure/azure_dns_zone_modified_or_deleted.yml @@ -0,0 +1,24 @@ +title: Azure DNS Zone Modified or Deleted +id: af6925b0-8826-47f1-9324-337507a0babd +description: Identifies when DNS zone is modified or deleted. +author: Austin Songer @austinsonger +status: experimental +date: 2021/08/08 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations#microsoftkubernetes +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message|startswith: MICROSOFT.NETWORK/DNSZONES + properties.message|endswith: + - /WRITE + - /DELETE + condition: selection +level: medium +tags: + - attack.impact +falsepositives: + - DNS zone modified and deleted may be performed by a system administrator. Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - DNS zone modification from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_federation_modified.yml b/src/main/resources/rules/azure/azure_federation_modified.yml new file mode 100644 index 000000000..e8c1458f5 --- /dev/null +++ b/src/main/resources/rules/azure/azure_federation_modified.yml @@ -0,0 +1,26 @@ +title: Azure Domain Federation Settings Modified +id: 352a54e1-74ba-4929-9d47-8193d67aba1e +description: Identifies when an user or application modified the federation settings on the domain. +author: Austin Songer +status: experimental +date: 2021/09/06 +modified: 2022/06/08 +references: + - https://attack.mitre.org/techniques/T1078 + - https://docs.microsoft.com/en-us/azure/active-directory/hybrid/how-to-connect-monitor-federation-changes +logsource: + product: azure + service: auditlogs +detection: + selection: + ActivityDisplayName: Set federation settings on domain + condition: selection +level: medium +tags: + - attack.initial_access + - attack.t1078 +falsepositives: + - Federation Settings being modified or deleted may be performed by a system administrator. + - Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Federation Settings modified from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. + diff --git a/src/main/resources/rules/azure/azure_firewall_modified_or_deleted.yml b/src/main/resources/rules/azure/azure_firewall_modified_or_deleted.yml new file mode 100644 index 000000000..2d09758ae --- /dev/null +++ b/src/main/resources/rules/azure/azure_firewall_modified_or_deleted.yml @@ -0,0 +1,23 @@ +title: Azure Firewall Modified or Deleted +id: 512cf937-ea9b-4332-939c-4c2c94baadcd +description: Identifies when a firewall is created, modified, or deleted. +author: Austin Songer @austinsonger +status: experimental +date: 2021/08/08 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.NETWORK/AZUREFIREWALLS/WRITE + - MICROSOFT.NETWORK/AZUREFIREWALLS/DELETE + condition: selection +level: medium +tags: + - attack.impact +falsepositives: + - Firewall being modified or deleted may be performed by a system administrator. Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Firewall modified or deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_firewall_rule_collection_modified_or_deleted.yml b/src/main/resources/rules/azure/azure_firewall_rule_collection_modified_or_deleted.yml new file mode 100644 index 000000000..30281498e --- /dev/null +++ b/src/main/resources/rules/azure/azure_firewall_rule_collection_modified_or_deleted.yml @@ -0,0 +1,27 @@ +title: Azure Firewall Rule Collection Modified or Deleted +id: 025c9fe7-db72-49f9-af0d-31341dd7dd57 +description: Identifies when Rule Collections (Application, NAT, and Network) is being modified or deleted. +author: Austin Songer @austinsonger +status: experimental +date: 2021/08/08 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.NETWORK/AZUREFIREWALLS/APPLICATIONRULECOLLECTIONS/WRITE + - MICROSOFT.NETWORK/AZUREFIREWALLS/APPLICATIONRULECOLLECTIONS/DELETE + - MICROSOFT.NETWORK/AZUREFIREWALLS/NATRULECOLLECTIONS/WRITE + - MICROSOFT.NETWORK/AZUREFIREWALLS/NATRULECOLLECTIONS/DELETE + - MICROSOFT.NETWORK/AZUREFIREWALLS/NETWORKRULECOLLECTIONS/WRITE + - MICROSOFT.NETWORK/AZUREFIREWALLS/NETWORKRULECOLLECTIONS/DELETE + condition: selection +level: medium +tags: + - attack.impact +falsepositives: + - Rule Collections (Application, NAT, and Network) being modified or deleted may be performed by a system administrator. Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Rule Collections (Application, NAT, and Network) modified or deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_granting_permission_detection.yml b/src/main/resources/rules/azure/azure_granting_permission_detection.yml new file mode 100644 index 000000000..d1fb9dfd4 --- /dev/null +++ b/src/main/resources/rules/azure/azure_granting_permission_detection.yml @@ -0,0 +1,21 @@ +title: Granting Of Permissions To An Account +id: a622fcd2-4b5a-436a-b8a2-a4171161833c +status: test +description: Identifies IPs from which users grant access to other users on azure resources and alerts when a previously unseen source IP address is used. +author: sawwinnnaung +references: + - https://github.com/Azure/Azure-Sentinel/blob/master/Detections/azureactivity/Granting_Permissions_To_Account_detection.yaml +date: 2020/05/07 +modified: 2021/11/27 +logsource: + product: azure + service: azureactivity +detection: + keywords: + - Microsoft.Authorization/roleAssignments/write + condition: keywords +falsepositives: + - Valid change +level: medium +tags: + - attack.t1098 diff --git a/src/main/resources/rules/azure/azure_keyvault_key_modified_or_deleted.yml b/src/main/resources/rules/azure/azure_keyvault_key_modified_or_deleted.yml new file mode 100644 index 000000000..fe2af9ae7 --- /dev/null +++ b/src/main/resources/rules/azure/azure_keyvault_key_modified_or_deleted.yml @@ -0,0 +1,34 @@ +title: Azure Keyvault Key Modified or Deleted +id: 80eeab92-0979-4152-942d-96749e11df40 +description: Identifies when a Keyvault Key is modified or deleted in Azure. +author: Austin Songer @austinsonger +status: experimental +date: 2021/08/16 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.KEYVAULT/VAULTS/KEYS/UPDATE/ACTION + - MICROSOFT.KEYVAULT/VAULTS/KEYS/CREATE + - MICROSOFT.KEYVAULT/VAULTS/KEYS/CREATE/ACTION + - MICROSOFT.KEYVAULT/VAULTS/KEYS/IMPORT/ACTION + - MICROSOFT.KEYVAULT/VAULTS/KEYS/RECOVER/ACTION + - MICROSOFT.KEYVAULT/VAULTS/KEYS/RESTORE/ACTION + - MICROSOFT.KEYVAULT/VAULTS/KEYS/DELETE + - MICROSOFT.KEYVAULT/VAULTS/KEYS/BACKUP/ACTION + - MICROSOFT.KEYVAULT/VAULTS/KEYS/PURGE/ACTION + condition: selection +level: medium +tags: + - attack.impact + - attack.credential_access + - attack.t1552 + - attack.t1552.001 +falsepositives: + - Key being modified or deleted may be performed by a system administrator. + - Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Key modified or deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_keyvault_modified_or_deleted.yml b/src/main/resources/rules/azure/azure_keyvault_modified_or_deleted.yml new file mode 100644 index 000000000..cc596dcbf --- /dev/null +++ b/src/main/resources/rules/azure/azure_keyvault_modified_or_deleted.yml @@ -0,0 +1,29 @@ +title: Azure Key Vault Modified or Deleted +id: 459a2970-bb84-4e6a-a32e-ff0fbd99448d +description: Identifies when a key vault is modified or deleted. +author: Austin Songer @austinsonger +status: experimental +date: 2021/08/16 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.KEYVAULT/VAULTS/WRITE + - MICROSOFT.KEYVAULT/VAULTS/DELETE + - MICROSOFT.KEYVAULT/VAULTS/DEPLOY/ACTION + - MICROSOFT.KEYVAULT/VAULTS/ACCESSPOLICIES/WRITE + condition: selection +level: medium +tags: + - attack.impact + - attack.credential_access + - attack.t1552 + - attack.t1552.001 +falsepositives: + - Key Vault being modified or deleted may be performed by a system administrator. + - Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Key Vault modified or deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_keyvault_secrets_modified_or_deleted.yml b/src/main/resources/rules/azure/azure_keyvault_secrets_modified_or_deleted.yml new file mode 100644 index 000000000..53f85064a --- /dev/null +++ b/src/main/resources/rules/azure/azure_keyvault_secrets_modified_or_deleted.yml @@ -0,0 +1,33 @@ +title: Azure Keyvault Secrets Modified or Deleted +id: b831353c-1971-477b-abb6-2828edc3bca1 +description: Identifies when secrets are modified or deleted in Azure. +author: Austin Songer @austinsonger +status: experimental +date: 2021/08/16 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.KEYVAULT/VAULTS/SECRETS/WRITE + - MICROSOFT.KEYVAULT/VAULTS/SECRETS/DELETE + - MICROSOFT.KEYVAULT/VAULTS/SECRETS/BACKUP/ACTION + - MICROSOFT.KEYVAULT/VAULTS/SECRETS/PURGE/ACTION + - MICROSOFT.KEYVAULT/VAULTS/SECRETS/UPDATE/ACTION + - MICROSOFT.KEYVAULT/VAULTS/SECRETS/RECOVER/ACTION + - MICROSOFT.KEYVAULT/VAULTS/SECRETS/RESTORE/ACTION + - MICROSOFT.KEYVAULT/VAULTS/SECRETS/SETSECRET/ACTION + condition: selection +level: medium +tags: + - attack.impact + - attack.credential_access + - attack.t1552 + - attack.t1552.001 +falsepositives: + - Secrets being modified or deleted may be performed by a system administrator. + - Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Secrets modified or deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_kubernetes_admission_controller.yml b/src/main/resources/rules/azure/azure_kubernetes_admission_controller.yml new file mode 100644 index 000000000..3e2dbbbae --- /dev/null +++ b/src/main/resources/rules/azure/azure_kubernetes_admission_controller.yml @@ -0,0 +1,34 @@ +title: Azure Kubernetes Admission Controller +id: a61a3c56-4ce2-4351-a079-88ae4cbd2b58 +description: Identifies when an admission controller is executed in Azure Kubernetes. A Kubernetes Admission controller intercepts, and possibly modifies, requests to the Kubernetes API server. The behavior of this admission controller is determined by an admission webhook (MutatingAdmissionWebhook or ValidatingAdmissionWebhook) that the user deploys in the cluster. An adversary can use such webhooks as the MutatingAdmissionWebhook for obtaining persistence in the cluster. For example, attackers can intercept and modify the pod creation operations in the cluster and add their malicious container to every created pod. An adversary can use the webhook ValidatingAdmissionWebhook, which could be used to obtain access credentials. An adversary could use the webhook to intercept the requests to the API server, record secrets, and other sensitive information. +author: Austin Songer @austinsonger +status: experimental +date: 2021/11/25 +modified: 2021/11/26 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations#microsoftkubernetes +logsource: + product: azure + service: activitylogs +detection: + selection1: + properties.message|startswith: MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/ADMISSIONREGISTRATION.K8S.IO + properties.message|endswith: + - /MUTATINGWEBHOOKCONFIGURATIONS/WRITE + - /VALIDATINGWEBHOOKCONFIGURATIONS/WRITE + selection2: + properties.message|startswith: MICROSOFT.CONTAINERSERVICE/MANAGEDCLUSTERS/ADMISSIONREGISTRATION.K8S.IO + properties.message|endswith: + - /MUTATINGWEBHOOKCONFIGURATIONS/WRITE + - /VALIDATINGWEBHOOKCONFIGURATIONS/WRITE + condition: selection1 or selection2 +level: medium +tags: + - attack.persistence + - attack.t1078 + - attack.credential_access + - attack.t1552 + - attack.t1552.007 +falsepositives: +- Azure Kubernetes Admissions Controller may be done by a system administrator. +- If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_kubernetes_cluster_created_or_deleted.yml b/src/main/resources/rules/azure/azure_kubernetes_cluster_created_or_deleted.yml new file mode 100644 index 000000000..6af9fe8ac --- /dev/null +++ b/src/main/resources/rules/azure/azure_kubernetes_cluster_created_or_deleted.yml @@ -0,0 +1,28 @@ +title: Azure Kubernetes Cluster Created or Deleted +id: 9541f321-7cba-4b43-80fc-fbd1fb922808 +description: Detects when a Azure Kubernetes Cluster is created or deleted. +author: Austin Songer @austinsonger +status: experimental +date: 2021/08/07 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations#microsoftkubernetes + - https://www.microsoft.com/security/blog/2021/03/23/secure-containerized-environments-with-updated-threat-matrix-for-kubernetes/ + - https://www.microsoft.com/security/blog/2020/04/02/attack-matrix-kubernetes/ + - https://medium.com/mitre-engenuity/att-ck-for-containers-now-available-4c2359654bf1 + - https://attack.mitre.org/matrices/enterprise/cloud/ +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/WRITE + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/DELETE + condition: selection +level: low +tags: + - attack.impact +falsepositives: + - Kubernetes cluster being created or deleted may be performed by a system administrator. Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Kubernetes cluster created or deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. + diff --git a/src/main/resources/rules/azure/azure_kubernetes_cronjob.yml b/src/main/resources/rules/azure/azure_kubernetes_cronjob.yml new file mode 100644 index 000000000..146f196aa --- /dev/null +++ b/src/main/resources/rules/azure/azure_kubernetes_cronjob.yml @@ -0,0 +1,34 @@ +title: Azure Kubernetes CronJob +id: 1c71e254-6655-42c1-b2d6-5e4718d7fc0a +description: Identifies when a Azure Kubernetes CronJob runs in Azure Cloud. Kubernetes Job is a controller that creates one or more pods and ensures that a specified number of them successfully terminate. Kubernetes Job can be used to run containers that perform finite tasks for batch jobs. Kubernetes CronJob is used to schedule Jobs. An Adversary may use Kubernetes CronJob for scheduling execution of malicious code that would run as a container in the cluster. +author: Austin Songer @austinsonger +status: experimental +date: 2021/11/22 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations#microsoftkubernetes + - https://kubernetes.io/docs/concepts/workloads/controllers/cron-jobs/ + - https://kubernetes.io/docs/concepts/workloads/controllers/job/ + - https://www.microsoft.com/security/blog/2020/04/02/attack-matrix-kubernetes/ +logsource: + product: azure + service: activitylogs +detection: + selection1: + properties.message|startswith: MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/BATCH + properties.message|endswith: + - /CRONJOBS/WRITE + - /JOBS/WRITE + selection2: + properties.message|startswith: MICROSOFT.CONTAINERSERVICE/MANAGEDCLUSTERS/BATCH + properties.message|endswith: + - /CRONJOBS/WRITE + - /JOBS/WRITE + condition: selection1 or selection2 +level: medium +tags: + - attack.persistence + - attack.privilege_escalation + - attack.execution +falsepositives: + - Azure Kubernetes CronJob/Job may be done by a system administrator. + - If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_kubernetes_events_deleted.yml b/src/main/resources/rules/azure/azure_kubernetes_events_deleted.yml new file mode 100644 index 000000000..7c4aefd91 --- /dev/null +++ b/src/main/resources/rules/azure/azure_kubernetes_events_deleted.yml @@ -0,0 +1,24 @@ +title: Azure Kubernetes Events Deleted +id: 225d8b09-e714-479c-a0e4-55e6f29adf35 +description: Detects when Events are deleted in Azure Kubernetes. An adversary may delete events in Azure Kubernetes in an attempt to evade detection. +author: Austin Songer @austinsonger +status: experimental +date: 2021/07/24 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations#microsoftkubernetes + - https://github.com/elastic/detection-rules/blob/da3852b681cf1a33898b1535892eab1f3a76177a/rules/integrations/azure/defense_evasion_kubernetes_events_deleted.toml +logsource: + product: azure + service: activitylogs +detection: + selection_operation_name: + properties.message: MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/EVENTS.K8S.IO/EVENTS/DELETE + condition: selection_operation_name +level: medium +tags: + - attack.defense_evasion + - attack.t1562 + - attack.t1562.001 +falsepositives: +- Event deletions may be done by a system or network administrator. Verify whether the username, hostname, and/or resource name should be making changes in your environment. Events deletions from unfamiliar users or hosts should be investigated. If known behavior is causing false positives, it can be exempted from the rule. + diff --git a/src/main/resources/rules/azure/azure_kubernetes_network_policy_change.yml b/src/main/resources/rules/azure/azure_kubernetes_network_policy_change.yml new file mode 100644 index 000000000..e731c0d87 --- /dev/null +++ b/src/main/resources/rules/azure/azure_kubernetes_network_policy_change.yml @@ -0,0 +1,30 @@ +title: Azure Kubernetes Network Policy Change +id: 08d6ac24-c927-4469-b3b7-2e422d6e3c43 +description: Identifies when a Azure Kubernetes network policy is modified or deleted. +author: Austin Songer @austinsonger +status: experimental +date: 2021/08/07 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations#microsoftkubernetes + - https://www.microsoft.com/security/blog/2021/03/23/secure-containerized-environments-with-updated-threat-matrix-for-kubernetes/ + - https://www.microsoft.com/security/blog/2020/04/02/attack-matrix-kubernetes/ + - https://medium.com/mitre-engenuity/att-ck-for-containers-now-available-4c2359654bf1 + - https://attack.mitre.org/matrices/enterprise/cloud/ +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/NETWORKING.K8S.IO/NETWORKPOLICIES/WRITE + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/NETWORKING.K8S.IO/NETWORKPOLICIES/DELETE + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/EXTENSIONS/NETWORKPOLICIES/WRITE + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/EXTENSIONS/NETWORKPOLICIES/DELETE + condition: selection +level: medium +tags: + - attack.impact + - attack.credential_access +falsepositives: + - Network Policy being modified and deleted may be performed by a system administrator. Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Network Policy being modified and deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_kubernetes_pods_deleted.yml b/src/main/resources/rules/azure/azure_kubernetes_pods_deleted.yml new file mode 100644 index 000000000..ac7d0e1df --- /dev/null +++ b/src/main/resources/rules/azure/azure_kubernetes_pods_deleted.yml @@ -0,0 +1,22 @@ +title: Azure Kubernetes Pods Deleted +id: b02f9591-12c3-4965-986a-88028629b2e1 +description: Identifies the deletion of Azure Kubernetes Pods. +author: Austin Songer @austinsonger +status: experimental +date: 2021/07/24 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations#microsoftkubernetes + - https://github.com/elastic/detection-rules/blob/065bf48a9987cd8bd826c098a30ce36e6868ee46/rules/integrations/azure/impact_kubernetes_pod_deleted.toml +logsource: + product: azure + service: activitylogs +detection: + selection_operation_name: + properties.message: MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/PODS/DELETE + condition: selection_operation_name +level: medium +tags: + - attack.impact +falsepositives: +- Pods may be deleted by a system administrator. Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. +- Pods deletions from unfamiliar users or hosts should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_kubernetes_role_access.yml b/src/main/resources/rules/azure/azure_kubernetes_role_access.yml new file mode 100644 index 000000000..b13335b6b --- /dev/null +++ b/src/main/resources/rules/azure/azure_kubernetes_role_access.yml @@ -0,0 +1,33 @@ +title: Azure Kubernetes Sensitive Role Access +id: 818fee0c-e0ec-4e45-824e-83e4817b0887 +description: Identifies when ClusterRoles/Roles are being modified or deleted. +author: Austin Songer @austinsonger +status: experimental +date: 2021/08/07 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations#microsoftkubernetes + - https://www.microsoft.com/security/blog/2021/03/23/secure-containerized-environments-with-updated-threat-matrix-for-kubernetes/ + - https://www.microsoft.com/security/blog/2020/04/02/attack-matrix-kubernetes/ + - https://medium.com/mitre-engenuity/att-ck-for-containers-now-available-4c2359654bf1 + - https://attack.mitre.org/matrices/enterprise/cloud/ +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/RBAC.AUTHORIZATION.K8S.IO/ROLES/WRITE + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/RBAC.AUTHORIZATION.K8S.IO/ROLES/DELETE + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/RBAC.AUTHORIZATION.K8S.IO/ROLES/BIND/ACTION + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/RBAC.AUTHORIZATION.K8S.IO/ROLES/ESCALATE/ACTION + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/RBAC.AUTHORIZATION.K8S.IO/CLUSTERROLES/WRITE + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/RBAC.AUTHORIZATION.K8S.IO/CLUSTERROLES/DELETE + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/RBAC.AUTHORIZATION.K8S.IO/CLUSTERROLES/BIND/ACTION + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/RBAC.AUTHORIZATION.K8S.IO/CLUSTERROLES/ESCALATE/ACTION + condition: selection +level: medium +tags: + - attack.impact +falsepositives: + - ClusterRoles/Roles being modified and deleted may be performed by a system administrator. Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - ClusterRoles/Roles modification from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_kubernetes_rolebinding_modified_or_deleted.yml b/src/main/resources/rules/azure/azure_kubernetes_rolebinding_modified_or_deleted.yml new file mode 100644 index 000000000..923169ffe --- /dev/null +++ b/src/main/resources/rules/azure/azure_kubernetes_rolebinding_modified_or_deleted.yml @@ -0,0 +1,31 @@ +title: Azure Kubernetes RoleBinding/ClusterRoleBinding Modified and Deleted +id: 25cb259b-bbdc-4b87-98b7-90d7c72f8743 +description: Detects the creation or patching of potential malicious RoleBinding/ClusterRoleBinding. +author: Austin Songer @austinsonger +status: experimental +date: 2021/08/07 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations#microsoftkubernetes + - https://www.microsoft.com/security/blog/2021/03/23/secure-containerized-environments-with-updated-threat-matrix-for-kubernetes/ + - https://www.microsoft.com/security/blog/2020/04/02/attack-matrix-kubernetes/ + - https://medium.com/mitre-engenuity/att-ck-for-containers-now-available-4c2359654bf1 + - https://attack.mitre.org/matrices/enterprise/cloud/ +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/RBAC.AUTHORIZATION.K8S.IO/CLUSTERROLEBINDINGS/WRITE + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/RBAC.AUTHORIZATION.K8S.IO/CLUSTERROLEBINDINGS/DELETE + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/RBAC.AUTHORIZATION.K8S.IO/ROLEBINDINGS/WRITE + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/RBAC.AUTHORIZATION.K8S.IO/ROLEBINDINGS/DELETE + condition: selection +level: medium +tags: + - attack.impact + - attack.credential_access +falsepositives: + - RoleBinding/ClusterRoleBinding being modified and deleted may be performed by a system administrator. Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - RoleBinding/ClusterRoleBinding modification from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. + diff --git a/src/main/resources/rules/azure/azure_kubernetes_secret_or_config_object_access.yml b/src/main/resources/rules/azure/azure_kubernetes_secret_or_config_object_access.yml new file mode 100644 index 000000000..3f24ab0ba --- /dev/null +++ b/src/main/resources/rules/azure/azure_kubernetes_secret_or_config_object_access.yml @@ -0,0 +1,28 @@ +title: Azure Kubernetes Secret or Config Object Access +id: 7ee0b4aa-d8d4-4088-b661-20efdf41a04c +description: Identifies when a Kubernetes account access a sensitive objects such as configmaps or secrets. +author: Austin Songer @austinsonger +status: experimental +date: 2021/08/07 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations#microsoftkubernetes + - https://www.microsoft.com/security/blog/2021/03/23/secure-containerized-environments-with-updated-threat-matrix-for-kubernetes/ + - https://www.microsoft.com/security/blog/2020/04/02/attack-matrix-kubernetes/ + - https://medium.com/mitre-engenuity/att-ck-for-containers-now-available-4c2359654bf1 + - https://attack.mitre.org/matrices/enterprise/cloud/ +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/CONFIGMAPS/WRITE + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/CONFIGMAPS/DELETE + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/SECRETS/WRITE + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/SECRETS/DELETE + condition: selection +level: medium +tags: + - attack.impact +falsepositives: + - Sensitive objects may be accessed by a system administrator. Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. Sensitive objects accessed from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_kubernetes_service_account_modified_or_deleted.yml b/src/main/resources/rules/azure/azure_kubernetes_service_account_modified_or_deleted.yml new file mode 100644 index 000000000..6a56ea6c6 --- /dev/null +++ b/src/main/resources/rules/azure/azure_kubernetes_service_account_modified_or_deleted.yml @@ -0,0 +1,28 @@ +title: Azure Kubernetes Service Account Modified or Deleted +id: 12d027c3-b48c-4d9d-8bb6-a732200034b2 +description: Identifies when a service account is modified or deleted. +author: Austin Songer @austinsonger +status: experimental +date: 2021/08/07 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations#microsoftkubernetes + - https://www.microsoft.com/security/blog/2021/03/23/secure-containerized-environments-with-updated-threat-matrix-for-kubernetes/ + - https://www.microsoft.com/security/blog/2020/04/02/attack-matrix-kubernetes/ + - https://medium.com/mitre-engenuity/att-ck-for-containers-now-available-4c2359654bf1 + - https://attack.mitre.org/matrices/enterprise/cloud/ +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/SERVICEACCOUNTS/WRITE + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/SERVICEACCOUNTS/DELETE + - MICROSOFT.KUBERNETES/CONNECTEDCLUSTERS/SERVICEACCOUNTS/IMPERSONATE/ACTION + condition: selection +level: medium +tags: + - attack.impact +falsepositives: + - Service account being modified or deleted may be performed by a system administrator. Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Service account modified or deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_login_to_disabled_account.yml b/src/main/resources/rules/azure/azure_login_to_disabled_account.yml new file mode 100644 index 000000000..8f8392ca8 --- /dev/null +++ b/src/main/resources/rules/azure/azure_login_to_disabled_account.yml @@ -0,0 +1,22 @@ +title: Login to Disabled Account +id: 908655e0-25cf-4ae1-b775-1c8ce9cf43d8 +status: experimental +author: AlertIQ +date: 2021/10/10 +description: Detect failed attempts to sign in to disabled accounts. +references: + - https://docs.microsoft.com/en-us/azure/active-directory/fundamentals/security-operations-privileged-accounts +logsource: + product: azure + service: signinlogs +detection: + selection: + ResultType: 50057 + ResultDescription: 'User account is disabled. The account has been disabled by an administrator.' + condition: selection +level: medium +falsepositives: + - Unknown +tags: + - attack.initial_access + - attack.t1078 diff --git a/src/main/resources/rules/azure/azure_mfa_denies.yml b/src/main/resources/rules/azure/azure_mfa_denies.yml new file mode 100644 index 000000000..97400b634 --- /dev/null +++ b/src/main/resources/rules/azure/azure_mfa_denies.yml @@ -0,0 +1,22 @@ +title: Multifactor Authentication Denied +id: e40f4962-b02b-4192-9bfe-245f7ece1f99 +status: experimental +author: AlertIQ +date: 2022/03/24 +description: User has indicated they haven't instigated the MFA prompt and could indicate an attacker has the password for the account. +references: + - https://www.microsoft.com/security/blog/2022/03/22/dev-0537-criminal-actor-targeting-organizations-for-data-exfiltration-and-destruction/ +logsource: + product: azure + service: signinlogs +detection: + selection: + AuthenticationRequirement: 'multiFactorAuthentication' + Status|contains: 'MFA Denied' + condition: selection +level: medium +falsepositives: + - Users actually login but miss-click into the Deny button when MFA prompt. +tags: + - attack.initial_access + - attack.t1078.004 diff --git a/src/main/resources/rules/azure/azure_mfa_disabled.yml b/src/main/resources/rules/azure/azure_mfa_disabled.yml new file mode 100644 index 000000000..d8ce54bce --- /dev/null +++ b/src/main/resources/rules/azure/azure_mfa_disabled.yml @@ -0,0 +1,24 @@ +title: Disabled MFA to Bypass Authentication Mechanisms +id: 7ea78478-a4f9-42a6-9dcd-f861816122bf +status: experimental +description: Detection for when multi factor authentication has been disabled, which might indicate a malicious activity to bypass authentication mechanisms. +author: '@ionsor' +date: 2022/02/08 +references: + - https://attack.mitre.org/techniques/T1556/ + - https://docs.microsoft.com/en-us/azure/active-directory/authentication/howto-mfa-userstates +logsource: + product: azure + service: activitylogs +detection: + selection: + eventSource: AzureActiveDirectory + eventName: 'Disable Strong Authentication.' + status: success + condition: selection +falsepositives: + - Authorized modification by administrators +level: medium +tags: + - attack.persistence + - attack.t1556 diff --git a/src/main/resources/rules/azure/azure_mfa_interrupted.yml b/src/main/resources/rules/azure/azure_mfa_interrupted.yml new file mode 100644 index 000000000..8d997688e --- /dev/null +++ b/src/main/resources/rules/azure/azure_mfa_interrupted.yml @@ -0,0 +1,25 @@ +title: Multifactor Authentication Interupted +id: 5496ff55-42ec-4369-81cb-00f417029e25 +status: experimental +author: AlertIQ +date: 2021/10/10 +description: Identifies user login with multifactor authentication failures, which might be an indication an attacker has the password for the account but can't pass the MFA challenge. +references: + - https://docs.microsoft.com/en-us/azure/active-directory/fundamentals/security-operations-privileged-accounts +logsource: + product: azure + service: signinlogs +detection: + selection: + ResultType: 50074 + ResultDescription|contains: 'Strong Auth required' + selection1: + ResultType: 500121 + ResultDescription|contains: 'Authentication failed during strong authentication request' + condition: selection or selection1 +level: medium +falsepositives: + - Unknown +tags: + - attack.initial_access + - attack.t1078.004 diff --git a/src/main/resources/rules/azure/azure_network_firewall_policy_modified_or_deleted.yml b/src/main/resources/rules/azure/azure_network_firewall_policy_modified_or_deleted.yml new file mode 100644 index 000000000..2a36bbdab --- /dev/null +++ b/src/main/resources/rules/azure/azure_network_firewall_policy_modified_or_deleted.yml @@ -0,0 +1,25 @@ +title: Azure Network Firewall Policy Modified or Deleted +id: 83c17918-746e-4bd9-920b-8e098bf88c23 +description: Identifies when a Firewall Policy is Modified or Deleted. +author: Austin Songer @austinsonger +status: experimental +date: 2021/09/02 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.NETWORK/FIREWALLPOLICIES/WRITE + - MICROSOFT.NETWORK/FIREWALLPOLICIES/JOIN/ACTION + - MICROSOFT.NETWORK/FIREWALLPOLICIES/CERTIFICATES/ACTION + - MICROSOFT.NETWORK/FIREWALLPOLICIES/DELETE + condition: selection +level: medium +tags: + - attack.impact +falsepositives: + - Firewall Policy being modified or deleted may be performed by a system administrator. Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Firewall Policy modified or deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_network_firewall_rule_modified_or_deleted.yml b/src/main/resources/rules/azure/azure_network_firewall_rule_modified_or_deleted.yml new file mode 100644 index 000000000..a2ab1da57 --- /dev/null +++ b/src/main/resources/rules/azure/azure_network_firewall_rule_modified_or_deleted.yml @@ -0,0 +1,25 @@ +title: Azure Firewall Rule Configuration Modified or Deleted +id: 2a7d64cf-81fa-4daf-ab1b-ab80b789c067 +description: Identifies when a Firewall Rule Configuration is Modified or Deleted. +author: Austin Songer @austinsonger +status: experimental +date: 2021/08/08 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.NETWORK/FIREWALLPOLICIES/RULECOLLECTIONGROUPS/WRITE + - MICROSOFT.NETWORK/FIREWALLPOLICIES/RULECOLLECTIONGROUPS/DELETE + - MICROSOFT.NETWORK/FIREWALLPOLICIES/RULEGROUPS/WRITE + - MICROSOFT.NETWORK/FIREWALLPOLICIES/RULEGROUPS/DELETE + condition: selection +level: medium +tags: + - attack.impact +falsepositives: + - Firewall Rule Configuration being modified or deleted may be performed by a system administrator. Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Firewall Rule Configuration modified or deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_network_p2s_vpn_modified_or_deleted.yml b/src/main/resources/rules/azure/azure_network_p2s_vpn_modified_or_deleted.yml new file mode 100644 index 000000000..c54bd0d56 --- /dev/null +++ b/src/main/resources/rules/azure/azure_network_p2s_vpn_modified_or_deleted.yml @@ -0,0 +1,27 @@ +title: Azure Point-to-site VPN Modified or Deleted +id: d9557b75-267b-4b43-922f-a775e2d1f792 +description: Identifies when a Point-to-site VPN is Modified or Deleted. +author: Austin Songer @austinsonger +status: experimental +date: 2021/08/08 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.NETWORK/P2SVPNGATEWAYS/WRITE + - MICROSOFT.NETWORK/P2SVPNGATEWAYS/DELETE + - MICROSOFT.NETWORK/P2SVPNGATEWAYS/RESET/ACTION + - MICROSOFT.NETWORK/P2SVPNGATEWAYS/GENERATEVPNPROFILE/ACTION + - MICROSOFT.NETWORK/P2SVPNGATEWAYS/DISCONNECTP2SVPNCONNECTIONS/ACTION + - MICROSOFT.NETWORK/P2SVPNGATEWAYS/PROVIDERS/MICROSOFT.INSIGHTS/DIAGNOSTICSETTINGS/WRITE + condition: selection +level: medium +tags: + - attack.impact +falsepositives: + - Point-to-site VPN being modified or deleted may be performed by a system administrator. Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Point-to-site VPN modified or deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_network_security_modified_or_deleted.yml b/src/main/resources/rules/azure/azure_network_security_modified_or_deleted.yml new file mode 100644 index 000000000..cd2f06382 --- /dev/null +++ b/src/main/resources/rules/azure/azure_network_security_modified_or_deleted.yml @@ -0,0 +1,27 @@ +title: Azure Network Security Configuration Modified or Deleted +id: d22b4df4-5a67-4859-a578-8c9a0b5af9df +description: Identifies when a network security configuration is modified or deleted. +author: Austin Songer @austinsonger +status: experimental +date: 2021/08/08 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.NETWORK/NETWORKSECURITYGROUPS/WRITE + - MICROSOFT.NETWORK/NETWORKSECURITYGROUPS/DELETE + - MICROSOFT.NETWORK/NETWORKSECURITYGROUPS/SECURITYRULES/WRITE + - MICROSOFT.NETWORK/NETWORKSECURITYGROUPS/SECURITYRULES/DELETE + - MICROSOFT.NETWORK/NETWORKSECURITYGROUPS/JOIN/ACTION + - MICROSOFT.NETWORK/NETWORKSECURITYGROUPS/PROVIDERS/MICROSOFT.INSIGHTS/DIAGNOSTICSETTINGS/WRITE + condition: selection +level: medium +tags: + - attack.impact +falsepositives: + - Network Security Configuration being modified or deleted may be performed by a system administrator. Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Network Security Configuration modified or deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_network_virtual_device_modified_or_deleted.yml b/src/main/resources/rules/azure/azure_network_virtual_device_modified_or_deleted.yml new file mode 100644 index 000000000..5eefd7274 --- /dev/null +++ b/src/main/resources/rules/azure/azure_network_virtual_device_modified_or_deleted.yml @@ -0,0 +1,32 @@ +title: Azure Virtual Network Device Modified or Deleted +id: 15ef3fac-f0f0-4dc4-ada0-660aa72980b3 +description: Identifies when a virtual network device is being modified or deleted. This can be a network interface, network virtual appliance, virtual hub, or virtual router. +author: Austin Songer @austinsonger +status: experimental +date: 2021/08/08 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.NETWORK/NETWORKINTERFACES/TAPCONFIGURATIONS/WRITE + - MICROSOFT.NETWORK/NETWORKINTERFACES/TAPCONFIGURATIONS/DELETE + - MICROSOFT.NETWORK/NETWORKINTERFACES/WRITE + - MICROSOFT.NETWORK/NETWORKINTERFACES/JOIN/ACTION + - MICROSOFT.NETWORK/NETWORKINTERFACES/DELETE + - MICROSOFT.NETWORK/NETWORKVIRTUALAPPLIANCES/DELETE + - MICROSOFT.NETWORK/NETWORKVIRTUALAPPLIANCES/WRITE + - MICROSOFT.NETWORK/VIRTUALHUBS/DELETE + - MICROSOFT.NETWORK/VIRTUALHUBS/WRITE + - MICROSOFT.NETWORK/VIRTUALROUTERS/WRITE + - MICROSOFT.NETWORK/VIRTUALROUTERS/DELETE + condition: selection +level: medium +tags: + - attack.impact +falsepositives: + - Virtual Network Device being modified or deleted may be performed by a system administrator. Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Virtual Network Device modified or deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_new_cloudshell_created.yml b/src/main/resources/rules/azure/azure_new_cloudshell_created.yml new file mode 100644 index 000000000..faa1a2c7b --- /dev/null +++ b/src/main/resources/rules/azure/azure_new_cloudshell_created.yml @@ -0,0 +1,22 @@ +title: Azure New CloudShell Created +id: 72af37e2-ec32-47dc-992b-bc288a2708cb +description: Identifies when a new cloudshell is created inside of Azure portal. +author: Austin Songer +status: experimental +date: 2021/09/21 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: MICROSOFT.PORTAL/CONSOLES/WRITE + condition: selection +level: medium +tags: + - attack.execution + - attack.t1059 +falsepositives: + - A new cloudshell may be created by a system administrator. + diff --git a/src/main/resources/rules/azure/azure_owner_removed_from_application_or_service_principal.yml b/src/main/resources/rules/azure/azure_owner_removed_from_application_or_service_principal.yml new file mode 100644 index 000000000..57b3f464f --- /dev/null +++ b/src/main/resources/rules/azure/azure_owner_removed_from_application_or_service_principal.yml @@ -0,0 +1,24 @@ +title: Azure Owner Removed From Application or Service Principal +id: 636e30d5-3736-42ea-96b1-e6e2f8429fd6 +description: Identifies when a owner is was removed from a application or service principal in Azure. +author: Austin Songer @austinsonger +status: experimental +date: 2021/09/03 +references: + - https://docs.microsoft.com/en-us/azure/active-directory/reports-monitoring/reference-audit-activities#application-proxy +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - Remove owner from service principal + - Remove owner from application + condition: selection +level: medium +tags: + - attack.defense_evasion +falsepositives: + - Owner being removed may be performed by a system administrator. + - Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Owner removed from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_rare_operations.yml b/src/main/resources/rules/azure/azure_rare_operations.yml new file mode 100644 index 000000000..169ae1b53 --- /dev/null +++ b/src/main/resources/rules/azure/azure_rare_operations.yml @@ -0,0 +1,27 @@ +title: Rare Subscription-level Operations In Azure +id: c1182e02-49a3-481c-b3de-0fadc4091488 +status: test +description: Identifies IPs from which users grant access to other users on azure resources and alerts when a previously unseen source IP address is used. +author: sawwinnnaung +references: + - https://github.com/Azure/Azure-Sentinel/blob/master/Detections/azureactivity/RareOperations.yaml +date: 2020/05/07 +modified: 2021/11/27 +logsource: + product: azure + service: azureactivity +detection: + keywords: + - Microsoft.DocumentDB/databaseAccounts/listKeys/action + - Microsoft.Maps/accounts/listKeys/action + - Microsoft.Media/mediaservices/listKeys/action + - Microsoft.CognitiveServices/accounts/listKeys/action + - Microsoft.Storage/storageAccounts/listKeys/action + - Microsoft.Compute/snapshots/write + - Microsoft.Network/networkSecurityGroups/write + condition: keywords +falsepositives: + - Valid change +level: medium +tags: + - attack.t1003 diff --git a/src/main/resources/rules/azure/azure_service_principal_created.yml b/src/main/resources/rules/azure/azure_service_principal_created.yml new file mode 100644 index 000000000..28d351a04 --- /dev/null +++ b/src/main/resources/rules/azure/azure_service_principal_created.yml @@ -0,0 +1,22 @@ +title: Azure Service Principal Created +id: 0ddcff6d-d262-40b0-804b-80eb592de8e3 +description: Identifies when a service principal is created in Azure. +author: Austin Songer @austinsonger +status: experimental +date: 2021/09/02 +references: + - https://docs.microsoft.com/en-us/azure/active-directory/reports-monitoring/reference-audit-activities#application-proxy +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: 'Add service principal' + condition: selection +level: medium +tags: + - attack.defense_evasion +falsepositives: + - Service principal being created may be performed by a system administrator. + - Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Service principal created from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_service_principal_removed.yml b/src/main/resources/rules/azure/azure_service_principal_removed.yml new file mode 100644 index 000000000..fbda2c690 --- /dev/null +++ b/src/main/resources/rules/azure/azure_service_principal_removed.yml @@ -0,0 +1,22 @@ +title: Azure Service Principal Removed +id: 448fd1ea-2116-4c62-9cde-a92d120e0f08 +description: Identifies when a service principal was removed in Azure. +author: Austin Songer @austinsonger +status: experimental +date: 2021/09/03 +references: + - https://docs.microsoft.com/en-us/azure/active-directory/reports-monitoring/reference-audit-activities#application-proxy +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: Remove service principal + condition: selection +level: medium +tags: + - attack.defense_evasion +falsepositives: + - Service principal being removed may be performed by a system administrator. + - Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Service principal removed from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_subscription_permissions_elevation_via_activitylogs.yml b/src/main/resources/rules/azure/azure_subscription_permissions_elevation_via_activitylogs.yml new file mode 100644 index 000000000..37c184fd9 --- /dev/null +++ b/src/main/resources/rules/azure/azure_subscription_permissions_elevation_via_activitylogs.yml @@ -0,0 +1,21 @@ +title: Azure Subscription Permission Elevation Via ActivityLogs +id: 09438caa-07b1-4870-8405-1dbafe3dad95 +status: experimental +author: Austin Songer @austinsonger +date: 2021/11/26 +description: Detects when a user has been elevated to manage all Azure Subscriptions. This change should be investigated immediately if it isn't planned. This setting could allow an attacker access to Azure subscriptions in your environment. +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations#microsoftauthorization +logsource: + product: azure + service: activitylogs +detection: + selection1: + properties.message: MICROSOFT.AUTHORIZATION/ELEVATEACCESS/ACTION + condition: selection1 +level: high +falsepositives: + - If this was approved by System Administrator. +tags: + - attack.initial_access + - attack.t1078 diff --git a/src/main/resources/rules/azure/azure_subscription_permissions_elevation_via_auditlogs.yml b/src/main/resources/rules/azure/azure_subscription_permissions_elevation_via_auditlogs.yml new file mode 100644 index 000000000..a566a107b --- /dev/null +++ b/src/main/resources/rules/azure/azure_subscription_permissions_elevation_via_auditlogs.yml @@ -0,0 +1,22 @@ +title: Azure Subscription Permission Elevation Via AuditLogs +id: ca9bf243-465e-494a-9e54-bf9fc239057d +status: experimental +author: Austin Songer @austinsonger +date: 2021/11/26 +description: Detects when a user has been elevated to manage all Azure Subscriptions. This change should be investigated immediately if it isn't planned. This setting could allow an attacker access to Azure subscriptions in your environment. +references: + - https://docs.microsoft.com/en-us/azure/active-directory/fundamentals/security-operations-privileged-accounts#assignment-and-elevation +logsource: + product: azure + service: auditlogs +detection: + selection: + Category: 'Administrative' + OperationName: 'Assigns the caller to user access admin' + condition: selection +level: high +falsepositives: + - If this was approved by System Administrator. +tags: + - attack.initial_access + - attack.t1078 diff --git a/src/main/resources/rules/azure/azure_suppression_rule_created.yml b/src/main/resources/rules/azure/azure_suppression_rule_created.yml new file mode 100644 index 000000000..7c079c960 --- /dev/null +++ b/src/main/resources/rules/azure/azure_suppression_rule_created.yml @@ -0,0 +1,22 @@ +title: Azure Suppression Rule Created +id: 92cc3e5d-eb57-419d-8c16-5c63f325a401 +description: Identifies when a suppression rule is created in Azure. Adversary's could attempt this to evade detection. +author: Austin Songer +status: experimental +date: 2021/08/16 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: MICROSOFT.SECURITY/ALERTSSUPPRESSIONRULES/WRITE + condition: selection +level: medium +tags: + - attack.impact +falsepositives: + - Suppression Rule being created may be performed by a system administrator. + - Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Suppression Rule created from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_unusual_authentication_interruption.yml b/src/main/resources/rules/azure/azure_unusual_authentication_interruption.yml new file mode 100644 index 000000000..f71db67b8 --- /dev/null +++ b/src/main/resources/rules/azure/azure_unusual_authentication_interruption.yml @@ -0,0 +1,28 @@ +title: Azure Unusual Authentication Interruption +id: 8366030e-7216-476b-9927-271d79f13cf3 +status: experimental +author: Austin Songer @austinsonger +date: 2021/11/26 +description: Detects when there is a interruption in the authentication process. +references: + - https://docs.microsoft.com/en-us/azure/active-directory/fundamentals/security-operations-privileged-accounts +logsource: + product: azure + service: signinlogs +detection: + selection1: + ResultType: 50097 + ResultDescription: 'Device authentication is required' + selection2: + ResultType: 50155 + ResultDescription: 'DeviceAuthenticationFailed' + selection3: + ResultType: 50158 + ResultDescription: 'ExternalSecurityChallenge - External security challenge was not satisfied' + condition: selection1 or selection2 or selection3 +level: medium +falsepositives: + - Unknown +tags: + - attack.initial_access + - attack.t1078 diff --git a/src/main/resources/rules/azure/azure_user_login_blocked_by_conditional_access.yml b/src/main/resources/rules/azure/azure_user_login_blocked_by_conditional_access.yml new file mode 100644 index 000000000..4b2b0bec9 --- /dev/null +++ b/src/main/resources/rules/azure/azure_user_login_blocked_by_conditional_access.yml @@ -0,0 +1,21 @@ +title: User Access Blocked by Azure Conditional Access +id: 9a60e676-26ac-44c3-814b-0c2a8b977adf +status: experimental +author: AlertIQ +date: 2021/10/10 +description: Detect access has been blocked by Conditional Access policies. The access policy does not allow token issuance which might be sights≈ of unauthorizeed login to valid accounts. +references: + - https://docs.microsoft.com/en-us/azure/active-directory/fundamentals/security-operations-privileged-accounts +logsource: + product: azure + service: signinlogs +detection: + selection: + ResultType: 53003 + condition: selection +level: medium +falsepositives: + - Unknown +tags: + - attack.credential_access + - attack.t1110 diff --git a/src/main/resources/rules/azure/azure_virtual_network_modified_or_deleted.yml b/src/main/resources/rules/azure/azure_virtual_network_modified_or_deleted.yml new file mode 100644 index 000000000..40a1604f6 --- /dev/null +++ b/src/main/resources/rules/azure/azure_virtual_network_modified_or_deleted.yml @@ -0,0 +1,26 @@ +title: Azure Virtual Network Modified or Deleted +id: bcfcc962-0e4a-4fd9-84bb-a833e672df3f +description: Identifies when a Virtual Network is modified or deleted in Azure. +author: Austin Songer @austinsonger +status: experimental +date: 2021/08/08 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message|startswith: + - MICROSOFT.NETWORK/VIRTUALNETWORKGATEWAYS/ + - MICROSOFT.NETWORK/VIRTUALNETWORKS/ + properties.message|endswith: + - /WRITE + - /DELETE + condition: selection +level: medium +tags: + - attack.impact +falsepositives: + - Virtual Network being modified or deleted may be performed by a system administrator. Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Virtual Network modified or deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/azure/azure_vpn_connection_modified_or_deleted.yml b/src/main/resources/rules/azure/azure_vpn_connection_modified_or_deleted.yml new file mode 100644 index 000000000..e7cc2e36b --- /dev/null +++ b/src/main/resources/rules/azure/azure_vpn_connection_modified_or_deleted.yml @@ -0,0 +1,23 @@ +title: Azure VPN Connection Modified or Deleted +id: 61171ffc-d79c-4ae5-8e10-9323dba19cd3 +description: Identifies when a VPN connection is modified or deleted. +author: Austin Songer @austinsonger +status: experimental +date: 2021/08/08 +references: + - https://docs.microsoft.com/en-us/azure/role-based-access-control/resource-provider-operations +logsource: + product: azure + service: activitylogs +detection: + selection: + properties.message: + - MICROSOFT.NETWORK/VPNGATEWAYS/VPNCONNECTIONS/WRITE + - MICROSOFT.NETWORK/VPNGATEWAYS/VPNCONNECTIONS/DELETE + condition: selection +level: medium +tags: + - attack.impact +falsepositives: + - VPN Connection being modified or deleted may be performed by a system administrator. Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - VPN Connection modified or deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. diff --git a/src/main/resources/rules/cloudtrail/aws_s3_data_management_tampering.yml b/src/main/resources/rules/cloudtrail/aws_s3_data_management_tampering.yml index 13e21a4bc..2080b16d0 100644 --- a/src/main/resources/rules/cloudtrail/aws_s3_data_management_tampering.yml +++ b/src/main/resources/rules/cloudtrail/aws_s3_data_management_tampering.yml @@ -1,5 +1,5 @@ title: AWS S3 Data Management Tampering -id: 78b3756a-7804-4ef7-8555-7b9024a02e2d +id: 78b3756a-7804-4ef7-8555-7b9024a02d2d description: Detects when a user tampers with S3 data management in Amazon Web Services. author: Austin Songer @austinsonger status: experimental diff --git a/src/main/resources/rules/github/github_delete_action_invoked.yml b/src/main/resources/rules/github/github_delete_action_invoked.yml new file mode 100644 index 000000000..7b8e610ba --- /dev/null +++ b/src/main/resources/rules/github/github_delete_action_invoked.yml @@ -0,0 +1,32 @@ +title: Github Delete Action Invoked +id: 16a71777-0b2e-4db7-9888-9d59cb75200b +status: experimental +description: Detects delete action in the Github audit logs for codespaces, environment, project and repo. +author: Muhammad Faisal +date: 2023/01/19 +references: + - https://docs.github.com/en/organizations/keeping-your-organization-secure/managing-security-settings-for-your-organization/reviewing-the-audit-log-for-your-organization#audit-log-actions +tags: + - attack.impact + - attack.collection + - attack.t1213.003 +logsource: + product: github + service: audit + definition: 'Requirements: The audit log streaming feature must be enabled to be able to receive such logs. You can enable following the documentation here: https://docs.github.com/en/enterprise-cloud@latest/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/streaming-the-audit-log-for-your-enterprise#setting-up-audit-log-streaming' +detection: + selection: + action: + - 'codespaces.delete' + - 'environment.delete' + - 'project.delete' + - 'repo.destroy' + condition: selection +fields: + - 'action' + - 'actor' + - 'org' + - 'actor_location.country_code' +falsepositives: + - Validate the deletion activity is permitted. The "actor" field need to be validated. +level: medium diff --git a/src/main/resources/rules/github/github_disable_high_risk_configuration.yml b/src/main/resources/rules/github/github_disable_high_risk_configuration.yml new file mode 100644 index 000000000..9a657fd34 --- /dev/null +++ b/src/main/resources/rules/github/github_disable_high_risk_configuration.yml @@ -0,0 +1,40 @@ +title: Github High Risk Configuration Disabled +id: 8622c92d-c00e-463c-b09d-fd06166f6794 +status: experimental +description: Detects when a user disables a critical security feature for an organization. +author: Muhammad Faisal +date: 2023/01/29 +references: + - https://docs.github.com/en/organizations/managing-oauth-access-to-your-organizations-data/disabling-oauth-app-access-restrictions-for-your-organization + - https://docs.github.com/en/organizations/keeping-your-organization-secure/managing-security-settings-for-your-organization/reviewing-the-audit-log-for-your-organization#dependabot_alerts-category-actions + - https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-security-and-analysis-settings-for-your-repository +tags: + - attack.credential_access + - attack.defense_evasion + - attack.persistence + - attack.t1556 +logsource: + product: github + service: audit + definition: 'Requirements: The audit log streaming feature must be enabled to be able to receive such logs. You can enable following the documentation here: https://docs.github.com/en/enterprise-cloud@latest/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/streaming-the-audit-log-for-your-enterprise#setting-up-audit-log-streaming' +detection: + selection: + action: + - 'org.disable_oauth_app_restrictions' + - 'org.disable_two_factor_requirement' + - 'repo.advanced_security_disabled' + - 'org.advanced_security_policy_selected_member_disabled' + condition: selection +fields: + - 'action' + - 'actor' + - 'org' + - 'actor_location.country_code' + - 'transport_protocol_name' + - 'repository' + - 'repo' + - 'repository_public' + - '@timestamp' +falsepositives: + - Approved administrator/owner activities. +level: high diff --git a/src/main/resources/rules/github/github_disabled_outdated_dependency_or_vulnerability.yml b/src/main/resources/rules/github/github_disabled_outdated_dependency_or_vulnerability.yml new file mode 100644 index 000000000..02052af78 --- /dev/null +++ b/src/main/resources/rules/github/github_disabled_outdated_dependency_or_vulnerability.yml @@ -0,0 +1,40 @@ +title: Outdated Dependency Or Vulnerability Alert Disabled +id: 34e1c7d4-0cd5-419d-9f1b-1dad3f61018d +status: experimental +description: | + Dependabot performs a scan to detect insecure dependencies, and sends Dependabot alerts. + This rule detects when an organization owner disables Dependabot alerts private repositories or Dependabot security updates for all repositories. +author: Muhammad Faisal +date: 2023/01/27 +references: + - https://docs.github.com/en/code-security/dependabot/dependabot-alerts/about-dependabot-alerts + - https://docs.github.com/en/organizations/keeping-your-organization-secure/managing-security-settings-for-your-organization/managing-security-and-analysis-settings-for-your-organization +tags: + - attack.initial_access + - attack.t1195.001 +logsource: + product: github + service: audit + definition: 'Requirements: The audit log streaming feature must be enabled to be able to receive such logs. You can enable following the documentation here: https://docs.github.com/en/enterprise-cloud@latest/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/streaming-the-audit-log-for-your-enterprise#setting-up-audit-log-streaming' +detection: + selection: + action: + - 'dependabot_alerts.disable' + - 'dependabot_alerts_new_repos.disable' + - 'dependabot_security_updates.disable' + - 'dependabot_security_updates_new_repos.disable' + - 'repository_vulnerability_alerts.disable' + condition: selection +fields: + - 'action' + - 'actor' + - 'org' + - 'actor_location.country_code' + - 'transport_protocol_name' + - 'repository' + - 'repo' + - 'repository_public' + - '@timestamp' +falsepositives: + - Approved changes by the Organization owner. Please validate the 'actor' if authorized to make the changes. +level: high diff --git a/src/main/resources/rules/github/github_new_org_member.yml b/src/main/resources/rules/github/github_new_org_member.yml new file mode 100644 index 000000000..384d64330 --- /dev/null +++ b/src/main/resources/rules/github/github_new_org_member.yml @@ -0,0 +1,34 @@ +title: New Github Organization Member Added +id: 3908d64a-3c06-4091-b503-b3a94424533b +status: experimental +description: Detects when a new member is added or invited to a github organization. +author: Muhammad Faisal +date: 2023/01/29 +references: + - https://docs.github.com/en/organizations/keeping-your-organization-secure/managing-security-settings-for-your-organization/reviewing-the-audit-log-for-your-organization#dependabot_alerts-category-actions +tags: + - attack.persistence + - attack.t1136.003 +logsource: + product: github + service: audit + definition: 'Requirements: The audit log streaming feature must be enabled to be able to receive such logs. You can enable following the documentation here: https://docs.github.com/en/enterprise-cloud@latest/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/streaming-the-audit-log-for-your-enterprise#setting-up-audit-log-streaming' +detection: + selection: + action: + - 'org.add_member' + - 'org.invite_member' + condition: selection +fields: + - 'action' + - 'actor' + - 'org' + - 'actor_location.country_code' + - 'transport_protocol_name' + - 'repository' + - 'repo' + - 'repository_public' + - '@timestamp' +falsepositives: + - Organization approved new members +level: informational diff --git a/src/main/resources/rules/github/github_new_secret_created.yml b/src/main/resources/rules/github/github_new_secret_created.yml new file mode 100644 index 000000000..105a8b6d0 --- /dev/null +++ b/src/main/resources/rules/github/github_new_secret_created.yml @@ -0,0 +1,34 @@ +title: Github New Secret Created +id: f9405037-bc97-4eb7-baba-167dad399b83 +status: experimental +description: Detects when a user creates action secret for the organization, environment, codespaces or repository. +author: Muhammad Faisal +date: 2023/01/20 +references: + - https://docs.github.com/en/organizations/keeping-your-organization-secure/managing-security-settings-for-your-organization/reviewing-the-audit-log-for-your-organization#audit-log-actions +tags: + - attack.defense_evasion + - attack.persistence + - attack.privilege_escalation + - attack.initial_access + - attack.t1078.004 +logsource: + product: github + service: audit + definition: 'Requirements: The audit log streaming feature must be enabled to be able to receive such logs. You can enable following the documentation here: https://docs.github.com/en/enterprise-cloud@latest/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/streaming-the-audit-log-for-your-enterprise#setting-up-audit-log-streaming' +detection: + selection: + action: + - 'org.create_actions_secret' + - 'environment.create_actions_secret' + - 'codespaces.create_an_org_secret' + - 'repo.create_actions_secret' + condition: selection +fields: + - 'action' + - 'actor' + - 'org' + - 'actor_location.country_code' +falsepositives: + - This detection cloud be noisy depending on the environment. It is recommended to keep a check on the new secrets when created and validate the "actor". +level: low diff --git a/src/main/resources/rules/github/github_outside_collaborator_detected.yml b/src/main/resources/rules/github/github_outside_collaborator_detected.yml new file mode 100644 index 000000000..fbd16b49e --- /dev/null +++ b/src/main/resources/rules/github/github_outside_collaborator_detected.yml @@ -0,0 +1,35 @@ +title: Github Outside Collaborator Detected +id: eaa9ac35-1730-441f-9587-25767bde99d7 +status: experimental +description: | + Detects when an organization member or an outside collaborator is added to or removed from a project board or has their permission level changed or when an owner removes an outside collaborator from an organization or when two-factor authentication is required in an organization and an outside collaborator does not use 2FA or disables 2FA. +author: Muhammad Faisal +date: 2023/01/20 +references: + - https://docs.github.com/en/organizations/keeping-your-organization-secure/managing-security-settings-for-your-organization/reviewing-the-audit-log-for-your-organization#audit-log-actions + - https://docs.github.com/en/organizations/keeping-your-organization-secure/managing-two-factor-authentication-for-your-organization/requiring-two-factor-authentication-in-your-organization +tags: + - attack.persistence + - attack.collection + - attack.t1098.001 + - attack.t1098.003 + - attack.t1213.003 +logsource: + product: github + service: audit + definition: 'Requirements: The audit log streaming feature must be enabled to be able to receive such logs. You can enable following the documentation here: https://docs.github.com/en/enterprise-cloud@latest/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/streaming-the-audit-log-for-your-enterprise#setting-up-audit-log-streaming' +detection: + selection: + action: + - 'project.update_user_permission' + - 'org.remove_outside_collaborator' + condition: selection +fields: + - 'action' + - 'actor' + - 'org' + - 'actor_location.country_code' +falsepositives: + - Validate the actor if permitted to access the repo. + - Validate the Multifactor Authentication changes. +level: medium diff --git a/src/main/resources/rules/github/github_self_hosted_runner_changes_detected.yml b/src/main/resources/rules/github/github_self_hosted_runner_changes_detected.yml new file mode 100644 index 000000000..7dc420524 --- /dev/null +++ b/src/main/resources/rules/github/github_self_hosted_runner_changes_detected.yml @@ -0,0 +1,55 @@ +title: Github Self Hosted Runner Changes Detected +id: f8ed0e8f-7438-4b79-85eb-f358ef2fbebd +status: experimental +description: | + A self-hosted runner is a system that you deploy and manage to execute jobs from GitHub Actions on GitHub.com. + This rule detects changes to self-hosted runners configurations in the environment. The self-hosted runner configuration changes once detected, + it should be validated from GitHub UI because the log entry may not provide full context. +author: Muhammad Faisal +date: 2023/01/27 +references: + - https://docs.github.com/en/actions/hosting-your-own-runners/about-self-hosted-runners#about-self-hosted-runners + - https://docs.github.com/en/organizations/keeping-your-organization-secure/managing-security-settings-for-your-organization/reviewing-the-audit-log-for-your-organization#search-based-on-operation +tags: + - attack.impact + - attack.discovery + - attack.collection + - attack.defense_evasion + - attack.persistence + - attack.privilege_escalation + - attack.initial_access + - attack.t1526 + - attack.t1213.003 + - attack.t1078.004 +logsource: + product: github + service: audit + definition: 'Requirements: The audit log streaming feature must be enabled to be able to receive such logs. You can enable following the documentation here: https://docs.github.com/en/enterprise-cloud@latest/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/streaming-the-audit-log-for-your-enterprise#setting-up-audit-log-streaming' +detection: + selection: + action: + - 'org.remove_self_hosted_runner' + - 'org.runner_group_created' + - 'org.runner_group_removed' + - 'org.runner_group_updated' + - 'org.runner_group_runners_added' + - 'org.runner_group_runner_removed' + - 'org.runner_group_runners_updated' + - 'repo.register_self_hosted_runner' + - 'repo.remove_self_hosted_runner' + condition: selection +fields: + - 'action' + - 'actor' + - 'org' + - 'actor_location.country_code' + - 'transport_protocol_name' + - 'repository' + - 'repo' + - 'repository_public' + - '@timestamp' +falsepositives: + - Allowed self-hosted runners changes in the environment. + - A self-hosted runner is automatically removed from GitHub if it has not connected to GitHub Actions for more than 14 days. + - An ephemeral self-hosted runner is automatically removed from GitHub if it has not connected to GitHub Actions for more than 1 day. +level: low diff --git a/src/main/resources/rules/gworkspace/gworkspace_application_removed.yml b/src/main/resources/rules/gworkspace/gworkspace_application_removed.yml new file mode 100644 index 000000000..9f0a63994 --- /dev/null +++ b/src/main/resources/rules/gworkspace/gworkspace_application_removed.yml @@ -0,0 +1,26 @@ +title: Google Workspace Application Removed +id: ee2803f0-71c8-4831-b48b-a1fc57601ee4 +status: test +description: Detects when an an application is removed from Google Workspace. +references: + - https://cloud.google.com/logging/docs/audit/gsuite-audit-logging#3 + - https://developers.google.com/admin-sdk/reports/v1/appendix/activity/admin-domain-settings?hl=en#REMOVE_APPLICATION + - https://developers.google.com/admin-sdk/reports/v1/appendix/activity/admin-domain-settings?hl=en#REMOVE_APPLICATION_FROM_WHITELIST +author: Austin Songer +date: 2021/08/26 +modified: 2022/10/09 +tags: + - attack.impact +logsource: + product: google_workspace + service: google_workspace.admin +detection: + selection: + eventService: admin.googleapis.com + eventName: + - REMOVE_APPLICATION + - REMOVE_APPLICATION_FROM_WHITELIST + condition: selection +falsepositives: + - Application being removed may be performed by a System Administrator. +level: medium diff --git a/src/main/resources/rules/gworkspace/gworkspace_granted_domain_api_access.yml b/src/main/resources/rules/gworkspace/gworkspace_granted_domain_api_access.yml new file mode 100644 index 000000000..ea14ab20b --- /dev/null +++ b/src/main/resources/rules/gworkspace/gworkspace_granted_domain_api_access.yml @@ -0,0 +1,25 @@ +title: Google Workspace Granted Domain API Access +id: 04e2a23a-9b29-4a5c-be3a-3542e3f982ba +status: test +description: Detects when an API access service account is granted domain authority. +references: + - https://cloud.google.com/logging/docs/audit/gsuite-audit-logging#3 + - https://developers.google.com/admin-sdk/reports/v1/appendix/activity/admin-domain-settings#AUTHORIZE_API_CLIENT_ACCESS +author: Austin Songer +date: 2021/08/23 +modified: 2022/10/09 +tags: + - attack.persistence + - attack.t1098 +logsource: + product: google_workspace + service: google_workspace.admin +detection: + selection: + eventService: admin.googleapis.com + eventName: AUTHORIZE_API_CLIENT_ACCESS + condition: selection +falsepositives: + - Unknown + +level: medium diff --git a/src/main/resources/rules/gworkspace/gworkspace_mfa_disabled.yml b/src/main/resources/rules/gworkspace/gworkspace_mfa_disabled.yml new file mode 100644 index 000000000..f5e988115 --- /dev/null +++ b/src/main/resources/rules/gworkspace/gworkspace_mfa_disabled.yml @@ -0,0 +1,28 @@ +title: Google Workspace MFA Disabled +id: 780601d1-6376-4f2a-884e-b8d45599f78c +status: test +description: Detects when multi-factor authentication (MFA) is disabled. +references: + - https://cloud.google.com/logging/docs/audit/gsuite-audit-logging#3 + - https://developers.google.com/admin-sdk/reports/v1/appendix/activity/admin-security-settings#ENFORCE_STRONG_AUTHENTICATION + - https://developers.google.com/admin-sdk/reports/v1/appendix/activity/admin-security-settings?hl=en#ALLOW_STRONG_AUTHENTICATION +author: Austin Songer +date: 2021/08/26 +modified: 2022/12/25 +tags: + - attack.impact +logsource: + product: google_workspace + service: google_workspace.admin +detection: + selection_base: + eventService: admin.googleapis.com + eventName: + - ENFORCE_STRONG_AUTHENTICATION + - ALLOW_STRONG_AUTHENTICATION + selection_eventValue: + new_value: 'false' + condition: all of selection* +falsepositives: + - MFA may be disabled and performed by a system administrator. +level: medium diff --git a/src/main/resources/rules/gworkspace/gworkspace_role_modified_or_deleted.yml b/src/main/resources/rules/gworkspace/gworkspace_role_modified_or_deleted.yml new file mode 100644 index 000000000..73f7a484a --- /dev/null +++ b/src/main/resources/rules/gworkspace/gworkspace_role_modified_or_deleted.yml @@ -0,0 +1,27 @@ +title: Google Workspace Role Modified or Deleted +id: 6aef64e3-60c6-4782-8db3-8448759c714e +status: test +description: Detects when an a role is modified or deleted in Google Workspace. +references: + - https://cloud.google.com/logging/docs/audit/gsuite-audit-logging#3 + - https://developers.google.com/admin-sdk/reports/v1/appendix/activity/admin-delegated-admin-settings +author: Austin Songer +date: 2021/08/24 +modified: 2022/10/09 +tags: + - attack.impact +logsource: + product: google_workspace + service: google_workspace.admin +detection: + selection: + eventService: admin.googleapis.com + eventName: + - DELETE_ROLE + - RENAME_ROLE + - UPDATE_ROLE + condition: selection +falsepositives: + - Unknown + +level: medium diff --git a/src/main/resources/rules/gworkspace/gworkspace_role_privilege_deleted.yml b/src/main/resources/rules/gworkspace/gworkspace_role_privilege_deleted.yml new file mode 100644 index 000000000..3ea2480b6 --- /dev/null +++ b/src/main/resources/rules/gworkspace/gworkspace_role_privilege_deleted.yml @@ -0,0 +1,24 @@ +title: Google Workspace Role Privilege Deleted +id: bf638ef7-4d2d-44bb-a1dc-a238252e6267 +status: test +description: Detects when an a role privilege is deleted in Google Workspace. +references: + - https://cloud.google.com/logging/docs/audit/gsuite-audit-logging#3 + - https://developers.google.com/admin-sdk/reports/v1/appendix/activity/admin-delegated-admin-settings +author: Austin Songer +date: 2021/08/24 +modified: 2022/10/09 +tags: + - attack.impact +logsource: + product: google_workspace + service: google_workspace.admin +detection: + selection: + eventService: admin.googleapis.com + eventName: REMOVE_PRIVILEGE + condition: selection +falsepositives: + - Unknown + +level: medium diff --git a/src/main/resources/rules/gworkspace/gworkspace_user_granted_admin_privileges.yml b/src/main/resources/rules/gworkspace/gworkspace_user_granted_admin_privileges.yml new file mode 100644 index 000000000..08e4b4b68 --- /dev/null +++ b/src/main/resources/rules/gworkspace/gworkspace_user_granted_admin_privileges.yml @@ -0,0 +1,26 @@ +title: Google Workspace User Granted Admin Privileges +id: 2d1b83e4-17c6-4896-a37b-29140b40a788 +status: test +description: Detects when an Google Workspace user is granted admin privileges. +references: + - https://cloud.google.com/logging/docs/audit/gsuite-audit-logging#3 + - https://developers.google.com/admin-sdk/reports/v1/appendix/activity/admin-user-settings#GRANT_ADMIN_PRIVILEGE +author: Austin Songer +date: 2021/08/23 +modified: 2022/10/09 +tags: + - attack.persistence + - attack.t1098 +logsource: + product: google_workspace + service: google_workspace.admin +detection: + selection: + eventService: admin.googleapis.com + eventName: + - GRANT_DELEGATED_ADMIN_PRIVILEGES + - GRANT_ADMIN_PRIVILEGE + condition: selection +falsepositives: + - Google Workspace admin role privileges, may be modified by system administrators. +level: medium diff --git a/src/main/resources/rules/m365/microsoft365_activity_by_terminated_user.yml b/src/main/resources/rules/m365/microsoft365_activity_by_terminated_user.yml new file mode 100644 index 000000000..70d11dba8 --- /dev/null +++ b/src/main/resources/rules/m365/microsoft365_activity_by_terminated_user.yml @@ -0,0 +1,26 @@ +title: Activity Performed by Terminated User +id: 2e669ed8-742e-4fe5-b3c4-5a59b486c2ee +status: test +description: | + Detects when a Microsoft Cloud App Security reported for users whose account were terminated in Azure AD, but still perform activities in other platforms such as AWS or Salesforce. + This is especially relevant for users who use another account to manage resources, since these accounts are often not terminated when a user leaves the company. +references: + - https://docs.microsoft.com/en-us/cloud-app-security/anomaly-detection-policy + - https://docs.microsoft.com/en-us/cloud-app-security/policy-template-reference +author: Austin Songer @austinsonger +date: 2021/08/23 +modified: 2022/10/09 +tags: + - attack.impact +logsource: + service: threat_management + product: m365 +detection: + selection: + eventSource: SecurityComplianceCenter + eventName: 'Activity performed by terminated user' + status: success + condition: selection +falsepositives: + - Unknown +level: medium diff --git a/src/main/resources/rules/m365/microsoft365_activity_from_anonymous_ip_addresses.yml b/src/main/resources/rules/m365/microsoft365_activity_from_anonymous_ip_addresses.yml new file mode 100644 index 000000000..029f859f8 --- /dev/null +++ b/src/main/resources/rules/m365/microsoft365_activity_from_anonymous_ip_addresses.yml @@ -0,0 +1,25 @@ +title: Activity from Anonymous IP Addresses +id: d8b0a4fe-07a8-41be-bd39-b14afa025d95 +status: test +description: Detects when a Microsoft Cloud App Security reported when users were active from an IP address that has been identified as an anonymous proxy IP address. +references: + - https://docs.microsoft.com/en-us/cloud-app-security/anomaly-detection-policy + - https://docs.microsoft.com/en-us/cloud-app-security/policy-template-reference +author: Austin Songer @austinsonger +date: 2021/08/23 +modified: 2022/10/09 +tags: + - attack.command_and_control + - attack.t1573 +logsource: + service: threat_management + product: m365 +detection: + selection: + eventSource: SecurityComplianceCenter + eventName: 'Activity from anonymous IP addresses' + status: success + condition: selection +falsepositives: + - User using a VPN or Proxy +level: medium diff --git a/src/main/resources/rules/m365/microsoft365_activity_from_infrequent_country.yml b/src/main/resources/rules/m365/microsoft365_activity_from_infrequent_country.yml new file mode 100644 index 000000000..01002c7b6 --- /dev/null +++ b/src/main/resources/rules/m365/microsoft365_activity_from_infrequent_country.yml @@ -0,0 +1,25 @@ +title: Activity from Infrequent Country +id: 0f2468a2-5055-4212-a368-7321198ee706 +status: test +description: Detects when a Microsoft Cloud App Security reported when an activity occurs from a location that wasn't recently or never visited by any user in the organization. +references: + - https://docs.microsoft.com/en-us/cloud-app-security/anomaly-detection-policy + - https://docs.microsoft.com/en-us/cloud-app-security/policy-template-reference +author: Austin Songer @austinsonger +date: 2021/08/23 +modified: 2022/10/09 +tags: + - attack.command_and_control + - attack.t1573 +logsource: + service: threat_management + product: m365 +detection: + selection: + eventSource: SecurityComplianceCenter + eventName: 'Activity from infrequent country' + status: success + condition: selection +falsepositives: + - Unknown +level: medium diff --git a/src/main/resources/rules/m365/microsoft365_data_exfiltration_to_unsanctioned_app.yml b/src/main/resources/rules/m365/microsoft365_data_exfiltration_to_unsanctioned_app.yml new file mode 100644 index 000000000..9453776be --- /dev/null +++ b/src/main/resources/rules/m365/microsoft365_data_exfiltration_to_unsanctioned_app.yml @@ -0,0 +1,25 @@ +title: Data Exfiltration to Unsanctioned Apps +id: 2b669496-d215-47d8-bd9a-f4a45bf07cda +status: test +description: Detects when a Microsoft Cloud App Security reported when a user or IP address uses an app that is not sanctioned to perform an activity that resembles an attempt to exfiltrate information from your organization. +references: + - https://docs.microsoft.com/en-us/cloud-app-security/anomaly-detection-policy + - https://docs.microsoft.com/en-us/cloud-app-security/policy-template-reference +author: Austin Songer @austinsonger +date: 2021/08/23 +modified: 2022/10/09 +tags: + - attack.exfiltration + - attack.t1537 +logsource: + service: threat_management + product: m365 +detection: + selection: + eventSource: SecurityComplianceCenter + eventName: 'Data exfiltration to unsanctioned apps' + status: success + condition: selection +falsepositives: + - Unknown +level: medium diff --git a/src/main/resources/rules/m365/microsoft365_from_susp_ip_addresses.yml b/src/main/resources/rules/m365/microsoft365_from_susp_ip_addresses.yml new file mode 100644 index 000000000..343b5466d --- /dev/null +++ b/src/main/resources/rules/m365/microsoft365_from_susp_ip_addresses.yml @@ -0,0 +1,27 @@ +title: Activity from Suspicious IP Addresses +id: a3501e8e-af9e-43c6-8cd6-9360bdaae498 +status: test +description: | + Detects when a Microsoft Cloud App Security reported users were active from an IP address identified as risky by Microsoft Threat Intelligence. + These IP addresses are involved in malicious activities, such as Botnet C&C, and may indicate compromised account. +references: + - https://docs.microsoft.com/en-us/cloud-app-security/anomaly-detection-policy + - https://docs.microsoft.com/en-us/cloud-app-security/policy-template-reference +author: Austin Songer @austinsonger +date: 2021/08/23 +modified: 2022/10/09 +tags: + - attack.command_and_control + - attack.t1573 +logsource: + service: threat_detection + product: m365 +detection: + selection: + eventSource: SecurityComplianceCenter + eventName: 'Activity from suspicious IP addresses' + status: success + condition: selection +falsepositives: + - Unknown +level: medium diff --git a/src/main/resources/rules/m365/microsoft365_impossible_travel_activity.yml b/src/main/resources/rules/m365/microsoft365_impossible_travel_activity.yml new file mode 100644 index 000000000..14b3c7700 --- /dev/null +++ b/src/main/resources/rules/m365/microsoft365_impossible_travel_activity.yml @@ -0,0 +1,25 @@ +title: Microsoft 365 - Impossible Travel Activity +id: d7eab125-5f94-43df-8710-795b80fa1189 +status: test +description: Detects when a Microsoft Cloud App Security reported a risky sign-in attempt due to a login associated with an impossible travel. +references: + - https://docs.microsoft.com/en-us/cloud-app-security/anomaly-detection-policy + - https://docs.microsoft.com/en-us/cloud-app-security/policy-template-reference +author: Austin Songer @austinsonger +date: 2020/07/06 +modified: 2021/11/27 +tags: + - attack.initial_access + - attack.t1078 +logsource: + service: threat_management + product: m365 +detection: + selection: + eventSource: SecurityComplianceCenter + eventName: 'Impossible travel activity' + status: success + condition: selection +falsepositives: + - Unknown +level: medium diff --git a/src/main/resources/rules/m365/microsoft365_logon_from_risky_ip_address.yml b/src/main/resources/rules/m365/microsoft365_logon_from_risky_ip_address.yml new file mode 100644 index 000000000..2ba14c9d4 --- /dev/null +++ b/src/main/resources/rules/m365/microsoft365_logon_from_risky_ip_address.yml @@ -0,0 +1,25 @@ +title: Logon from a Risky IP Address +id: c191e2fa-f9d6-4ccf-82af-4f2aba08359f +status: test +description: Detects when a Microsoft Cloud App Security reported when a user signs into your sanctioned apps from a risky IP address. +references: + - https://docs.microsoft.com/en-us/cloud-app-security/anomaly-detection-policy + - https://docs.microsoft.com/en-us/cloud-app-security/policy-template-reference +author: Austin Songer @austinsonger +date: 2021/08/23 +modified: 2022/10/09 +tags: + - attack.initial_access + - attack.t1078 +logsource: + service: threat_management + product: m365 +detection: + selection: + eventSource: SecurityComplianceCenter + eventName: 'Log on from a risky IP address' + status: success + condition: selection +falsepositives: + - Unknown +level: medium diff --git a/src/main/resources/rules/m365/microsoft365_new_federated_domain_added.yml b/src/main/resources/rules/m365/microsoft365_new_federated_domain_added.yml new file mode 100644 index 000000000..0218141b3 --- /dev/null +++ b/src/main/resources/rules/m365/microsoft365_new_federated_domain_added.yml @@ -0,0 +1,27 @@ +title: New Federated Domain Added +id: 42127bdd-9133-474f-a6f1-97b6c08a4339 +status: test +description: Alert for the addition of a new federated domain. +references: + - https://www.fireeye.com/content/dam/fireeye-www/blog/pdfs/wp-m-unc2452-2021-000343-01.pdf + - https://us-cert.cisa.gov/ncas/alerts/aa21-008a + - https://www.splunk.com/en_us/blog/security/a-golden-saml-journey-solarwinds-continued.html + - https://www.sygnia.co/golden-saml-advisory + - https://o365blog.com/post/aadbackdoor/ +author: '@ionsor' +date: 2022/02/08 +tags: + - attack.persistence + - attack.t1136.003 +logsource: + service: exchange + product: m365 +detection: + selection: + eventSource: Exchange + eventName: 'Add-FederatedDomain' + status: success + condition: selection +falsepositives: + - The creation of a new Federated domain is not necessarily malicious, however these events need to be followed closely, as it may indicate federated credential abuse or backdoor via federated identities at a similar or different cloud provider. +level: medium diff --git a/src/main/resources/rules/m365/microsoft365_potential_ransomware_activity.yml b/src/main/resources/rules/m365/microsoft365_potential_ransomware_activity.yml new file mode 100644 index 000000000..6ca1f523b --- /dev/null +++ b/src/main/resources/rules/m365/microsoft365_potential_ransomware_activity.yml @@ -0,0 +1,25 @@ +title: Microsoft 365 - Potential Ransomware Activity +id: bd132164-884a-48f1-aa2d-c6d646b04c69 +status: test +description: Detects when a Microsoft Cloud App Security reported when a user uploads files to the cloud that might be infected with ransomware. +references: + - https://docs.microsoft.com/en-us/cloud-app-security/anomaly-detection-policy + - https://docs.microsoft.com/en-us/cloud-app-security/policy-template-reference +author: austinsonger +date: 2021/08/19 +modified: 2022/10/09 +tags: + - attack.impact + - attack.t1486 +logsource: + service: threat_management + product: m365 +detection: + selection: + eventSource: SecurityComplianceCenter + eventName: 'Potential ransomware activity' + status: success + condition: selection +falsepositives: + - Unknown +level: medium diff --git a/src/main/resources/rules/m365/microsoft365_pst_export_alert.yml b/src/main/resources/rules/m365/microsoft365_pst_export_alert.yml new file mode 100644 index 000000000..03c2e2309 --- /dev/null +++ b/src/main/resources/rules/m365/microsoft365_pst_export_alert.yml @@ -0,0 +1,28 @@ +title: PST Export Alert Using eDiscovery Alert +id: 18b88d08-d73e-4f21-bc25-4b9892a4fdd0 +related: + - id: 6897cd82-6664-11ed-9022-0242ac120002 + type: similar +status: experimental +description: Alert on when a user has performed an eDiscovery search or exported a PST file from the search. This PST file usually has sensitive information including email body content +references: + - https://learn.microsoft.com/en-us/microsoft-365/compliance/alert-policies?view=o365-worldwide +author: Sorina Ionescu +date: 2022/02/08 +modified: 2022/11/17 +tags: + - attack.collection + - attack.t1114 +logsource: + service: threat_management + product: m365 + definition: Requires the 'eDiscovery search or exported' alert to be enabled +detection: + selection: + eventSource: SecurityComplianceCenter + eventName: 'eDiscovery search started or exported' + status: success + condition: selection +falsepositives: + - PST export can be done for legitimate purposes but due to the sensitive nature of its content it must be monitored. +level: medium diff --git a/src/main/resources/rules/m365/microsoft365_pst_export_alert_using_new_compliancesearchaction.yml b/src/main/resources/rules/m365/microsoft365_pst_export_alert_using_new_compliancesearchaction.yml new file mode 100644 index 000000000..58e939a46 --- /dev/null +++ b/src/main/resources/rules/m365/microsoft365_pst_export_alert_using_new_compliancesearchaction.yml @@ -0,0 +1,28 @@ +title: PST Export Alert Using New-ComplianceSearchAction +id: 6897cd82-6664-11ed-9022-0242ac120002 +related: + - id: 18b88d08-d73e-4f21-bc25-4b9892a4fdd0 + type: similar +status: experimental +description: Alert when a user has performed an export to a search using 'New-ComplianceSearchAction' with the '-Export' flag. This detection will detect PST export even if the 'eDiscovery search or exported' alert is disabled in the O365.This rule will apply to ExchangePowerShell usage and from the cloud. +references: + - https://learn.microsoft.com/en-us/powershell/module/exchange/new-compliancesearchaction?view=exchange-ps +author: Nikita Khalimonenkov +date: 2022/11/17 +tags: + - attack.collection + - attack.t1114 +logsource: + service: threat_management + product: m365 +detection: + selection: + eventSource: SecurityComplianceCenter + Payload|contains|all: + - 'New-ComplianceSearchAction' + - 'Export' + - 'pst' + condition: selection +falsepositives: + - Exporting a PST can be done for legitimate purposes by legitimate sources, but due to the sensitive nature of PST content, it must be monitored. +level: medium diff --git a/src/main/resources/rules/m365/microsoft365_susp_inbox_forwarding.yml b/src/main/resources/rules/m365/microsoft365_susp_inbox_forwarding.yml new file mode 100644 index 000000000..f7f74a5dd --- /dev/null +++ b/src/main/resources/rules/m365/microsoft365_susp_inbox_forwarding.yml @@ -0,0 +1,25 @@ +title: Suspicious Inbox Forwarding +id: 6c220477-0b5b-4b25-bb90-66183b4089e8 +status: test +description: Detects when a Microsoft Cloud App Security reported suspicious email forwarding rules, for example, if a user created an inbox rule that forwards a copy of all emails to an external address. +references: + - https://docs.microsoft.com/en-us/cloud-app-security/anomaly-detection-policy + - https://docs.microsoft.com/en-us/cloud-app-security/policy-template-reference +author: Austin Songer @austinsonger +date: 2021/08/22 +modified: 2022/10/09 +tags: + - attack.exfiltration + - attack.t1020 +logsource: + service: threat_management + product: m365 +detection: + selection: + eventSource: SecurityComplianceCenter + eventName: 'Suspicious inbox forwarding' + status: success + condition: selection +falsepositives: + - Unknown +level: low diff --git a/src/main/resources/rules/m365/microsoft365_susp_oauth_app_file_download_activities.yml b/src/main/resources/rules/m365/microsoft365_susp_oauth_app_file_download_activities.yml new file mode 100644 index 000000000..d6dc40733 --- /dev/null +++ b/src/main/resources/rules/m365/microsoft365_susp_oauth_app_file_download_activities.yml @@ -0,0 +1,24 @@ +title: Suspicious OAuth App File Download Activities +id: ee111937-1fe7-40f0-962a-0eb44d57d174 +status: test +description: Detects when a Microsoft Cloud App Security reported when an app downloads multiple files from Microsoft SharePoint or Microsoft OneDrive in a manner that is unusual for the user. +references: + - https://docs.microsoft.com/en-us/cloud-app-security/anomaly-detection-policy + - https://docs.microsoft.com/en-us/cloud-app-security/policy-template-reference +author: Austin Songer @austinsonger +date: 2021/08/23 +modified: 2022/10/09 +tags: + - attack.exfiltration +logsource: + service: threat_management + product: m365 +detection: + selection: + eventSource: SecurityComplianceCenter + eventName: 'Suspicious OAuth app file download activities' + status: success + condition: selection +falsepositives: + - Unknown +level: medium diff --git a/src/main/resources/rules/m365/microsoft365_unusual_volume_of_file_deletion.yml b/src/main/resources/rules/m365/microsoft365_unusual_volume_of_file_deletion.yml new file mode 100644 index 000000000..ff0b26d54 --- /dev/null +++ b/src/main/resources/rules/m365/microsoft365_unusual_volume_of_file_deletion.yml @@ -0,0 +1,25 @@ +title: Microsoft 365 - Unusual Volume of File Deletion +id: 78a34b67-3c39-4886-8fb4-61c46dc18ecd +status: test +description: Detects when a Microsoft Cloud App Security reported a user has deleted a unusual a large volume of files. +references: + - https://docs.microsoft.com/en-us/cloud-app-security/anomaly-detection-policy + - https://docs.microsoft.com/en-us/cloud-app-security/policy-template-reference +author: austinsonger +date: 2021/08/19 +modified: 2022/10/09 +tags: + - attack.impact + - attack.t1485 +logsource: + service: threat_management + product: m365 +detection: + selection: + eventSource: SecurityComplianceCenter + eventName: 'Unusual volume of file deletion' + status: success + condition: selection +falsepositives: + - Unknown +level: medium diff --git a/src/main/resources/rules/m365/microsoft365_user_restricted_from_sending_email.yml b/src/main/resources/rules/m365/microsoft365_user_restricted_from_sending_email.yml new file mode 100644 index 000000000..218f9d3e2 --- /dev/null +++ b/src/main/resources/rules/m365/microsoft365_user_restricted_from_sending_email.yml @@ -0,0 +1,25 @@ +title: Microsoft 365 - User Restricted from Sending Email +id: ff246f56-7f24-402a-baca-b86540e3925c +status: test +description: Detects when a Security Compliance Center reported a user who exceeded sending limits of the service policies and because of this has been restricted from sending email. +references: + - https://docs.microsoft.com/en-us/cloud-app-security/anomaly-detection-policy + - https://docs.microsoft.com/en-us/cloud-app-security/policy-template-reference +author: austinsonger +date: 2021/08/19 +modified: 2022/10/09 +tags: + - attack.initial_access + - attack.t1199 +logsource: + service: threat_management + product: m365 +detection: + selection: + eventSource: SecurityComplianceCenter + eventName: 'User restricted from sending email' + status: success + condition: selection +falsepositives: + - Unknown +level: medium diff --git a/src/main/resources/rules/okta/okta_admin_role_assigned_to_user_or_group.yml b/src/main/resources/rules/okta/okta_admin_role_assigned_to_user_or_group.yml new file mode 100644 index 000000000..5a0372669 --- /dev/null +++ b/src/main/resources/rules/okta/okta_admin_role_assigned_to_user_or_group.yml @@ -0,0 +1,26 @@ +title: Okta Admin Role Assigned to an User or Group +id: 413d4a81-6c98-4479-9863-014785fd579c +status: test +description: Detects when an the Administrator role is assigned to an user or group. +references: + - https://developer.okta.com/docs/reference/api/system-log/ + - https://developer.okta.com/docs/reference/api/event-types/ +author: Austin Songer @austinsonger +date: 2021/09/12 +modified: 2022/10/09 +tags: + - attack.persistence + - attack.t1098.003 +logsource: + product: okta + service: okta +detection: + selection: + eventtype: + - group.privilege.grant + - user.account.privilege.grant + condition: selection +falsepositives: + - Administrator roles could be assigned to users or group by other admin users. + +level: medium diff --git a/src/main/resources/rules/okta/okta_admin_role_assignment_created.yml b/src/main/resources/rules/okta/okta_admin_role_assignment_created.yml new file mode 100644 index 000000000..f8fa20391 --- /dev/null +++ b/src/main/resources/rules/okta/okta_admin_role_assignment_created.yml @@ -0,0 +1,21 @@ +title: Okta Admin Role Assignment Created +id: 139bdd4b-9cd7-49ba-a2f4-744d0a8f5d8c +status: experimental +description: Detects when a new admin role assignment is created. Which could be a sign of privilege escalation or persistence +references: + - https://developer.okta.com/docs/reference/api/system-log/ + - https://developer.okta.com/docs/reference/api/event-types/ +author: Nikita Khalimonenkov +date: 2023/01/19 +tags: + - attack.persistence +logsource: + product: okta + service: okta +detection: + selection: + eventtype: 'iam.resourceset.bindings.add' + condition: selection +falsepositives: + - Legitimate creation of a new admin role assignment +level: medium diff --git a/src/main/resources/rules/okta/okta_api_token_created.yml b/src/main/resources/rules/okta/okta_api_token_created.yml new file mode 100644 index 000000000..b2e259f85 --- /dev/null +++ b/src/main/resources/rules/okta/okta_api_token_created.yml @@ -0,0 +1,22 @@ +title: Okta API Token Created +id: 19951c21-229d-4ccb-8774-b993c3ff3c5c +status: test +description: Detects when a API token is created +references: + - https://developer.okta.com/docs/reference/api/system-log/ + - https://developer.okta.com/docs/reference/api/event-types/ +author: Austin Songer @austinsonger +date: 2021/09/12 +modified: 2022/10/09 +tags: + - attack.persistence +logsource: + product: okta + service: okta +detection: + selection: + eventtype: system.api_token.create + condition: selection +falsepositives: + - Legitimate creation of an API token by authorized users +level: medium diff --git a/src/main/resources/rules/okta/okta_api_token_revoked.yml b/src/main/resources/rules/okta/okta_api_token_revoked.yml new file mode 100644 index 000000000..e57121bfa --- /dev/null +++ b/src/main/resources/rules/okta/okta_api_token_revoked.yml @@ -0,0 +1,23 @@ +title: Okta API Token Revoked +id: cf1dbc6b-6205-41b4-9b88-a83980d2255b +status: test +description: Detects when a API Token is revoked. +references: + - https://developer.okta.com/docs/reference/api/system-log/ + - https://developer.okta.com/docs/reference/api/event-types/ +author: Austin Songer @austinsonger +date: 2021/09/12 +modified: 2022/10/09 +tags: + - attack.impact +logsource: + product: okta + service: okta +detection: + selection: + eventtype: system.api_token.revoke + condition: selection +falsepositives: + - Unknown + +level: medium diff --git a/src/main/resources/rules/okta/okta_application_modified_or_deleted.yml b/src/main/resources/rules/okta/okta_application_modified_or_deleted.yml new file mode 100644 index 000000000..800cb8698 --- /dev/null +++ b/src/main/resources/rules/okta/okta_application_modified_or_deleted.yml @@ -0,0 +1,25 @@ +title: Okta Application Modified or Deleted +id: 7899144b-e416-4c28-b0b5-ab8f9e0a541d +status: test +description: Detects when an application is modified or deleted. +references: + - https://developer.okta.com/docs/reference/api/system-log/ + - https://developer.okta.com/docs/reference/api/event-types/ +author: Austin Songer @austinsonger +date: 2021/09/12 +modified: 2022/10/09 +tags: + - attack.impact +logsource: + product: okta + service: okta +detection: + selection: + eventtype: + - application.lifecycle.update + - application.lifecycle.delete + condition: selection +falsepositives: + - Unknown + +level: medium diff --git a/src/main/resources/rules/okta/okta_application_sign_on_policy_modified_or_deleted.yml b/src/main/resources/rules/okta/okta_application_sign_on_policy_modified_or_deleted.yml new file mode 100644 index 000000000..8d77d6eb5 --- /dev/null +++ b/src/main/resources/rules/okta/okta_application_sign_on_policy_modified_or_deleted.yml @@ -0,0 +1,24 @@ +title: Okta Application Sign-On Policy Modified or Deleted +id: 8f668cc4-c18e-45fe-ad00-624a981cf88a +status: test +description: Detects when an application Sign-on Policy is modified or deleted. +references: + - https://developer.okta.com/docs/reference/api/system-log/ + - https://developer.okta.com/docs/reference/api/event-types/ +author: Austin Songer @austinsonger +date: 2021/09/12 +modified: 2022/10/09 +tags: + - attack.impact +logsource: + product: okta + service: okta +detection: + selection: + eventtype: + - application.policy.sign_on.update + - application.policy.sign_on.rule.delete + condition: selection +falsepositives: + - Unknown +level: medium diff --git a/src/main/resources/rules/okta/okta_mfa_reset_or_deactivated.yml b/src/main/resources/rules/okta/okta_mfa_reset_or_deactivated.yml new file mode 100644 index 000000000..2ffd5a7cf --- /dev/null +++ b/src/main/resources/rules/okta/okta_mfa_reset_or_deactivated.yml @@ -0,0 +1,27 @@ +title: Okta MFA Reset or Deactivated +id: 50e068d7-1e6b-4054-87e5-0a592c40c7e0 +status: test +description: Detects when an attempt at deactivating or resetting MFA. +references: + - https://developer.okta.com/docs/reference/api/system-log/ + - https://developer.okta.com/docs/reference/api/event-types/ +author: Austin Songer @austinsonger +date: 2021/09/21 +modified: 2022/10/09 +tags: + - attack.persistence + - attack.credential_access + - attack.defense_evasion + - attack.t1556.006 +logsource: + product: okta + service: okta +detection: + selection: + eventtype: + - user.mfa.factor.deactivate + - user.mfa.factor.reset_all + condition: selection +falsepositives: + - If a MFA reset or deactivated was performed by a system administrator. +level: medium diff --git a/src/main/resources/rules/okta/okta_network_zone_deactivated_or_deleted.yml b/src/main/resources/rules/okta/okta_network_zone_deactivated_or_deleted.yml new file mode 100644 index 000000000..5e348ee53 --- /dev/null +++ b/src/main/resources/rules/okta/okta_network_zone_deactivated_or_deleted.yml @@ -0,0 +1,25 @@ +title: Okta Network Zone Deactivated or Deleted +id: 9f308120-69ed-4506-abde-ac6da81f4310 +status: test +description: Detects when an Network Zone is Deactivated or Deleted. +references: + - https://developer.okta.com/docs/reference/api/system-log/ + - https://developer.okta.com/docs/reference/api/event-types/ +author: Austin Songer @austinsonger +date: 2021/09/12 +modified: 2022/10/09 +tags: + - attack.impact +logsource: + product: okta + service: okta +detection: + selection: + eventtype: + - zone.deactivate + - zone.delete + condition: selection +falsepositives: + - Unknown + +level: medium diff --git a/src/main/resources/rules/okta/okta_policy_modified_or_deleted.yml b/src/main/resources/rules/okta/okta_policy_modified_or_deleted.yml new file mode 100644 index 000000000..547fcadcd --- /dev/null +++ b/src/main/resources/rules/okta/okta_policy_modified_or_deleted.yml @@ -0,0 +1,26 @@ +title: Okta Policy Modified or Deleted +id: 1667a172-ed4c-463c-9969-efd92195319a +status: test +description: Detects when an Okta policy is modified or deleted. +references: + - https://developer.okta.com/docs/reference/api/system-log/ + - https://developer.okta.com/docs/reference/api/event-types/ +author: Austin Songer @austinsonger +date: 2021/09/12 +modified: 2022/10/09 +tags: + - attack.impact +logsource: + product: okta + service: okta +detection: + selection: + eventtype: + - policy.lifecycle.update + - policy.lifecycle.delete + condition: selection +falsepositives: + - Okta Policies being modified or deleted may be performed by a system administrator. + - Verify whether the user identity, user agent, and/or hostname should be making changes in your environment. + - Okta Policies modified or deleted from unfamiliar users should be investigated. If known behavior is causing false positives, it can be exempted from the rule. +level: low diff --git a/src/main/resources/rules/okta/okta_policy_rule_modified_or_deleted.yml b/src/main/resources/rules/okta/okta_policy_rule_modified_or_deleted.yml new file mode 100644 index 000000000..958e131d3 --- /dev/null +++ b/src/main/resources/rules/okta/okta_policy_rule_modified_or_deleted.yml @@ -0,0 +1,25 @@ +title: Okta Policy Rule Modified or Deleted +id: 0c97c1d3-4057-45c9-b148-1de94b631931 +status: test +description: Detects when an Policy Rule is Modified or Deleted. +references: + - https://developer.okta.com/docs/reference/api/system-log/ + - https://developer.okta.com/docs/reference/api/event-types/ +author: Austin Songer @austinsonger +date: 2021/09/12 +modified: 2022/10/09 +tags: + - attack.impact +logsource: + product: okta + service: okta +detection: + selection: + eventtype: + - policy.rule.update + - policy.rule.delete + condition: selection +falsepositives: + - Unknown + +level: medium diff --git a/src/main/resources/rules/okta/okta_security_threat_detected.yml b/src/main/resources/rules/okta/okta_security_threat_detected.yml new file mode 100644 index 000000000..0cffb48f9 --- /dev/null +++ b/src/main/resources/rules/okta/okta_security_threat_detected.yml @@ -0,0 +1,21 @@ +title: Okta Security Threat Detected +id: 5c82f0b9-3c6d-477f-a318-0e14a1df73e0 +status: test +description: Detects when an security threat is detected in Okta. +references: + - https://okta.github.io/okta-help/en/prod/Content/Topics/Security/threat-insight/configure-threatinsight-system-log.htm + - https://developer.okta.com/docs/reference/api/system-log/ + - https://developer.okta.com/docs/reference/api/event-types/ +author: Austin Songer @austinsonger +date: 2021/09/12 +modified: 2022/10/09 +logsource: + product: okta + service: okta +detection: + selection: + eventtype: security.threat.detected + condition: selection +falsepositives: + - Unknown +level: medium diff --git a/src/main/resources/rules/okta/okta_unauthorized_access_to_app.yml b/src/main/resources/rules/okta/okta_unauthorized_access_to_app.yml new file mode 100644 index 000000000..0206a7b96 --- /dev/null +++ b/src/main/resources/rules/okta/okta_unauthorized_access_to_app.yml @@ -0,0 +1,22 @@ +title: Okta Unauthorized Access to App +id: 6cc2b61b-d97e-42ef-a9dd-8aa8dc951657 +status: test +description: Detects when unauthorized access to app occurs. +references: + - https://developer.okta.com/docs/reference/api/system-log/ + - https://developer.okta.com/docs/reference/api/event-types/ +author: Austin Songer @austinsonger +date: 2021/09/12 +modified: 2022/10/09 +tags: + - attack.impact +logsource: + product: okta + service: okta +detection: + selection: + displaymessage: User attempted unauthorized access to app + condition: selection +falsepositives: + - User might of believe that they had access. +level: medium diff --git a/src/main/resources/rules/okta/okta_user_account_locked_out.yml b/src/main/resources/rules/okta/okta_user_account_locked_out.yml new file mode 100644 index 000000000..6a55d16e3 --- /dev/null +++ b/src/main/resources/rules/okta/okta_user_account_locked_out.yml @@ -0,0 +1,23 @@ +title: Okta User Account Locked Out +id: 14701da0-4b0f-4ee6-9c95-2ffb4e73bb9a +status: test +description: Detects when an user account is locked out. +references: + - https://developer.okta.com/docs/reference/api/system-log/ + - https://developer.okta.com/docs/reference/api/event-types/ +author: Austin Songer @austinsonger +date: 2021/09/12 +modified: 2022/10/09 +tags: + - attack.impact + - attack.t1531 +logsource: + product: okta + service: okta +detection: + selection: + displaymessage: Max sign in attempts exceeded + condition: selection +falsepositives: + - Unknown +level: medium diff --git a/src/main/resources/rules/rule_categories.json b/src/main/resources/rules/rule_categories.json new file mode 100644 index 000000000..bf8520c78 --- /dev/null +++ b/src/main/resources/rules/rule_categories.json @@ -0,0 +1,56 @@ +{ + "rule_categories": [ + { + "key": "ad_ldap", + "display_name": "AD/LDAP logs" + }, + { + "key": "dns", + "display_name": "DNS logs" + }, + { + "key": "network", + "display_name": "Network logs" + }, + { + "key": "apache_access", + "display_name": "Apache access logs" + }, + { + "key": "cloudtrail", + "display_name": "Cloud Trail logs" + }, + { + "key": "s3", + "display_name": "S3 access logs" + }, + { + "key": "windows", + "display_name": "Windows logs logs" + }, + { + "key": "gworkspace", + "display_name": "Google Workspace logs" + }, + { + "key": "github", + "display_name": "Github logs" + }, + { + "key": "m365", + "display_name": "Microsoft 365 logs" + }, + { + "key": "okta", + "display_name": "Okta logs" + }, + { + "key": "azure", + "display_name": "Azure logs" + }, + { + "key": "linux", + "display_name": "System logs" + } + ] +} \ No newline at end of file diff --git a/src/main/resources/rules/test_windows/dns_query_win_regsvr32_network_activity.yml b/src/main/resources/rules/test_windows/dns_query_win_regsvr32_network_activity.yml new file mode 100644 index 000000000..c812ab379 --- /dev/null +++ b/src/main/resources/rules/test_windows/dns_query_win_regsvr32_network_activity.yml @@ -0,0 +1,35 @@ +title: Regsvr32 Network Activity +id: 36a037c4-c228-4866-b6a3-48eb292b9955 +related: + - id: c7e91a02-d771-4a6d-a700-42587e0b1095 + type: derived +description: Detects network connections and DNS queries initiated by Regsvr32.exe +references: + - https://pentestlab.blog/2017/05/11/applocker-bypass-regsvr32/ + - https://oddvar.moe/2017/12/13/applocker-case-study-how-insecure-is-it-really-part-1/ + - https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1117/T1117.md +tags: + - attack.execution + - attack.t1559.001 + - attack.defense_evasion + - attack.t1218.010 +author: Dmitriy Lifanov, oscd.community +status: experimental +date: 2019/10/25 +modified: 2021/09/21 +logsource: + category: dns_query + product: windows +detection: + selection: + Image|endswith: '\regsvr32.exe' + condition: selection +fields: + - ComputerName + - User + - Image + - DestinationIp + - DestinationPort +falsepositives: + - Unknown +level: high \ No newline at end of file diff --git a/src/main/resources/rules/test_windows/net_connection_win_regsvr32_network_activity.yml b/src/main/resources/rules/test_windows/net_connection_win_regsvr32_network_activity.yml new file mode 100644 index 000000000..908fda8a4 --- /dev/null +++ b/src/main/resources/rules/test_windows/net_connection_win_regsvr32_network_activity.yml @@ -0,0 +1,32 @@ +title: Regsvr32 Network Activity +id: c6e91a02-d771-4a6d-a700-42587e0b1095 +description: Detects network connections and DNS queries initiated by Regsvr32.exe +references: + - https://pentestlab.blog/2017/05/11/applocker-bypass-regsvr32/ + - https://oddvar.moe/2017/12/13/applocker-case-study-how-insecure-is-it-really-part-1/ + - https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1117/T1117.md +author: Dmitriy Lifanov, oscd.community +status: experimental +date: 2019/10/25 +modified: 2021/09/21 +logsource: + category: network_connection + product: windows +detection: + selection: + Image|endswith: '\regsvr32.exe' + condition: selection +fields: + - ComputerName + - User + - Image + - DestinationIp + - DestinationPort +falsepositives: + - Unknown +level: high +tags: + - attack.execution + - attack.t1559.001 + - attack.defense_evasion + - attack.t1218.010 \ No newline at end of file diff --git a/src/main/resources/rules/test_windows/proc_creation_win_susp_regsvr32_no_dll.yml b/src/main/resources/rules/test_windows/proc_creation_win_susp_regsvr32_no_dll.yml new file mode 100644 index 000000000..6207cc6ec --- /dev/null +++ b/src/main/resources/rules/test_windows/proc_creation_win_susp_regsvr32_no_dll.yml @@ -0,0 +1,38 @@ +title: Regsvr32 Command Line Without DLL +id: 5a919691-7302-437f-8e10-1fe088afa145 +status: experimental +description: Detects a regsvr.exe execution that doesn't contain a DLL in the command line +author: Florian Roth +date: 2019/07/17 +modified: 2021/10/19 +references: + - https://app.any.run/tasks/34221348-072d-4b70-93f3-aa71f6ebecad/ +tags: + - attack.defense_evasion + - attack.t1574 + - attack.execution +logsource: + category: process_creation + product: windows +detection: + selection: + Image|endswith: '\regsvr32.exe' + filter: + CommandLine|contains: + - '.dll' + - '.ocx' + - '.cpl' + - '.ax' + - '.bav' + - '.ppl' + filter_null1_for_4688: + CommandLine: null + filter_null2_for_4688: + CommandLine: '' + condition: selection and not filter and not filter_null1_for_4688 and not filter_null2_for_4688 +fields: + - CommandLine + - ParentCommandLine +falsepositives: + - Unknown +level: high diff --git a/src/main/resources/rules/test_windows/proc_creation_win_system_exe_anomaly.yml b/src/main/resources/rules/test_windows/proc_creation_win_system_exe_anomaly.yml new file mode 100644 index 000000000..77127b5ab --- /dev/null +++ b/src/main/resources/rules/test_windows/proc_creation_win_system_exe_anomaly.yml @@ -0,0 +1,81 @@ +title: System File Execution Location Anomaly +id: e5a6b256-3e47-40fc-89d2-7a477edd6915 +status: experimental +description: Detects a Windows program executable started in a suspicious folder +references: + - https://twitter.com/GelosSnake/status/934900723426439170 +author: Florian Roth, Patrick Bareiss, Anton Kutepov, oscd.community, Nasreddine Bencherchali +date: 2017/11/27 +modified: 2022/07/03 +tags: + - attack.defense_evasion + - attack.t1036 +logsource: + category: process_creation + product: windows +detection: + selection: + Image|endswith: + - '\svchost.exe' + - '\rundll32.exe' + - '\services.exe' + - '\powershell.exe' + - '\powershell_ise.exe' + - '\regsvr32.exe' + - '\spoolsv.exe' + - '\lsass.exe' + - '\smss.exe' + - '\csrss.exe' + - '\conhost.exe' + - '\wininit.exe' + - '\lsm.exe' + - '\winlogon.exe' + - '\explorer.exe' + - '\taskhost.exe' + - '\Taskmgr.exe' + - '\sihost.exe' + - '\RuntimeBroker.exe' + - '\smartscreen.exe' + - '\dllhost.exe' + - '\audiodg.exe' + - '\wlanext.exe' + - '\dashost.exe' + - '\schtasks.exe' + - '\cscript.exe' + - '\wscript.exe' + - '\wsl.exe' + - '\bitsadmin.exe' + - '\atbroker.exe' + - '\bcdedit.exe' + - '\certutil.exe' + - '\certreq.exe' + - '\cmstp.exe' + - '\consent.exe' + - '\defrag.exe' + - '\dism.exe' + - '\dllhst3g.exe' + - '\eventvwr.exe' + - '\msiexec.exe' + - '\runonce.exe' + - '\winver.exe' + - '\logonui.exe' + - '\userinit.exe' + - '\dwm.exe' + - '\LsaIso.exe' + - '\ntoskrnl.exe' + filter: + - Image|startswith: + - 'C:\Windows\System32\' + - 'C:\Windows\SysWOW64\' + - 'C:\Windows\WinSxS\' + - 'C:\avast! sandbox' + - Image|contains: '\SystemRoot\System32\' + - Image: 'C:\Windows\explorer.exe' + condition: selection and not filter +fields: + - ComputerName + - User + - Image +falsepositives: + - Exotic software +level: high diff --git a/src/main/resources/rules/test_windows/win_sample_rule.yml b/src/main/resources/rules/test_windows/win_sample_rule.yml new file mode 100644 index 000000000..5476dec37 --- /dev/null +++ b/src/main/resources/rules/test_windows/win_sample_rule.yml @@ -0,0 +1,26 @@ +title: QuarksPwDump Clearing Access History +id: 06724b9a-52fc-11ed-bdc3-0242ac120002 +status: experimental +description: Detects QuarksPwDump clearing access history in hive +author: Florian Roth +date: 2017/05/15 +modified: 2019/11/13 +tags: + - attack.credential_access + - attack.t1003 # an old one + - attack.t1003.002 + - attack.defense_evasion +level: critical +logsource: + product: windows + service: system +detection: + selection: + EventID: 22 + Message|contains: 'C:\\Program Files\\nxlog\\nxlog.exe' + HostName|startswith: 'EC2AMAZ' + keywords: + - "NT AUTHORITY" + condition: selection or keywords +falsepositives: + - Unknown \ No newline at end of file diff --git a/src/main/resources/rules/windows/builtin/system/win_sample_rule.yml b/src/main/resources/rules/windows/builtin/system/win_sample_rule.yml index b38d74cc8..e0bb61e09 100644 --- a/src/main/resources/rules/windows/builtin/system/win_sample_rule.yml +++ b/src/main/resources/rules/windows/builtin/system/win_sample_rule.yml @@ -16,9 +16,9 @@ logsource: service: system detection: selection: - EventID: 22 - Message|contains: 'C:\\Program Files\\nxlog\\nxlog.exe' - HostName|startswith: 'EC2AMAZ' + EventId: 22 + message|contains: 'C:\\Program Files\\nxlog\\nxlog.exe' + hostname|startswith: 'EC2AMAZ' condition: selection falsepositives: - Unknown \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/DetectorThreatIntelIT.java b/src/test/java/org/opensearch/securityanalytics/DetectorThreatIntelIT.java new file mode 100644 index 000000000..e69de29bb diff --git a/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsClientUtils.java b/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsClientUtils.java index af8adac95..73d81abb2 100644 --- a/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsClientUtils.java +++ b/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsClientUtils.java @@ -12,11 +12,11 @@ import org.opensearch.cluster.ClusterModule; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.collect.ImmutableOpenMap; -import org.opensearch.common.xcontent.DeprecationHandler; -import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentParserUtils; import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.mapper.MapperService; import org.opensearch.test.rest.OpenSearchRestTestCase; diff --git a/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsPluginRestIT.java b/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsPluginRestApiIT.java similarity index 84% rename from src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsPluginRestIT.java rename to src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsPluginRestApiIT.java index 06114f3d2..9ad61870a 100644 --- a/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsPluginRestIT.java +++ b/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsPluginRestApiIT.java @@ -8,15 +8,14 @@ import org.opensearch.client.Request; import org.opensearch.client.Response; import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.NamedXContentRegistry; import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.test.rest.OpenSearchRestTestCase; +import org.opensearch.core.xcontent.NamedXContentRegistry; import java.io.IOException; import java.util.List; import java.util.Map; -public class SecurityAnalyticsPluginRestIT extends OpenSearchRestTestCase { +public class SecurityAnalyticsPluginRestApiIT extends SecurityAnalyticsRestTestCase { @SuppressWarnings("unchecked") public void testPluginsAreInstalled() throws IOException { diff --git a/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsPluginIT.java b/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsPluginTransportIT.java similarity index 94% rename from src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsPluginIT.java rename to src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsPluginTransportIT.java index 70233077c..688df56a0 100644 --- a/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsPluginIT.java +++ b/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsPluginTransportIT.java @@ -17,7 +17,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -public class SecurityAnalyticsPluginIT extends OpenSearchIntegTestCase { +/*public class SecurityAnalyticsPluginTransportIT extends OpenSearchIntegTestCase { public void testPluginsAreInstalled() { NodesInfoRequest nodesInfoRequest = new NodesInfoRequest(); @@ -30,4 +30,4 @@ public void testPluginsAreInstalled() { Assert.assertTrue(pluginInfos.stream().anyMatch(pluginInfo -> pluginInfo.getName() .equals("opensearch-security-analytics"))); } -} \ No newline at end of file +}*/ \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsRestTestCase.java b/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsRestTestCase.java index a3511b862..bfae03c62 100644 --- a/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsRestTestCase.java +++ b/src/test/java/org/opensearch/securityanalytics/SecurityAnalyticsRestTestCase.java @@ -4,12 +4,20 @@ */ package org.opensearch.securityanalytics; +import java.util.Set; +import org.apache.http.HttpHost; +import java.util.ArrayList; +import java.util.function.BiConsumer; +import java.nio.file.Path; import org.apache.http.Header; import org.apache.http.HttpEntity; +import org.apache.http.HttpStatus; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.message.BasicHeader; import org.junit.Assert; +import org.junit.After; +import org.junit.Before; import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Request; @@ -17,32 +25,47 @@ import org.opensearch.client.Response; import org.opensearch.client.ResponseException; import org.opensearch.client.RestClient; +import org.opensearch.client.RestClientBuilder; import org.opensearch.client.WarningsHandler; import org.opensearch.cluster.ClusterModule; import org.opensearch.cluster.metadata.MappingMetadata; +import org.opensearch.common.Strings; import org.opensearch.common.UUIDs; import org.opensearch.common.collect.ImmutableOpenMap; +import org.opensearch.common.io.PathUtils; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.DeprecationHandler; -import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.commons.alerting.model.ScheduledJob; import org.opensearch.commons.alerting.util.IndexUtilsKt; +import org.opensearch.commons.rest.SecureRestClientBuilder; +import org.opensearch.commons.ConfigConstants; +import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.index.IndexSettings; import org.opensearch.index.mapper.MapperService; import org.opensearch.rest.RestStatus; import org.opensearch.search.SearchHit; +import org.opensearch.securityanalytics.action.AlertDto; import org.opensearch.securityanalytics.action.CreateIndexMappingsRequest; import org.opensearch.securityanalytics.action.UpdateIndexMappingsRequest; +import org.opensearch.securityanalytics.config.monitors.DetectorMonitorConfig; +import org.opensearch.securityanalytics.correlation.index.query.CorrelationQueryBuilder; +import org.opensearch.securityanalytics.mapper.MappingsTraverser; +import org.opensearch.securityanalytics.model.CorrelationRule; import org.opensearch.securityanalytics.model.Detector; import org.opensearch.securityanalytics.model.Rule; import org.opensearch.test.rest.OpenSearchRestTestCase; + import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -52,9 +75,119 @@ import java.util.stream.Collectors; import static org.opensearch.action.admin.indices.create.CreateIndexRequest.MAPPINGS; +import static org.opensearch.securityanalytics.SecurityAnalyticsPlugin.MAPPER_BASE_URI; +import static org.opensearch.securityanalytics.TestHelpers.sumAggregationTestRule; +import static org.opensearch.securityanalytics.TestHelpers.productIndexAvgAggRule; +import static org.opensearch.securityanalytics.TestHelpers.windowsIndexMapping; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ALERT_HISTORY_INDEX_MAX_AGE; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ALERT_HISTORY_MAX_DOCS; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ALERT_HISTORY_RETENTION_PERIOD; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ALERT_HISTORY_ROLLOVER_PERIOD; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.FINDING_HISTORY_INDEX_MAX_AGE; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.FINDING_HISTORY_MAX_DOCS; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.FINDING_HISTORY_RETENTION_PERIOD; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.FINDING_HISTORY_ROLLOVER_PERIOD; +import static org.opensearch.securityanalytics.util.RuleTopicIndices.ruleTopicIndexSettings; public class SecurityAnalyticsRestTestCase extends OpenSearchRestTestCase { + protected void createRuleTopicIndex(String detectorType, String additionalMapping) throws IOException { + + String mappings = "" + + " \"_meta\": {" + + " \"schema_version\": 1" + + " }," + + " \"properties\": {" + + " \"query\": {" + + " \"type\": \"percolator_ext\"" + + " }," + + " \"monitor_id\": {" + + " \"type\": \"text\"" + + " }," + + " \"index\": {" + + " \"type\": \"text\"" + + " }" + + " }"; + + String indexName = DetectorMonitorConfig.getRuleIndex(detectorType); + createIndex( + indexName, + Settings.builder().loadFromSource(ruleTopicIndexSettings(), XContentType.JSON).build(), + mappings + ); + // Update mappings + if (additionalMapping != null) { + Response response = makeRequest(client(), "PUT", indexName + "/_mapping", Collections.emptyMap(), new StringEntity(additionalMapping), new BasicHeader("Content-Type", "application/json")); + assertEquals(RestStatus.OK, restStatus(response)); + } + } + + protected String createDetector(Detector detector) throws IOException { + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + return responseBody.get("_id").toString(); + } + + protected void deleteDetector(String detectorId) throws IOException { + makeRequest(client(), "DELETE", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), null); + } + + protected List getAllComponentTemplates() throws IOException { + Response response = makeRequest(client(), "GET", "_component_template", Collections.emptyMap(), null); + assertEquals(RestStatus.OK, restStatus(response)); + Map responseBody = asMap(response); + return (List) responseBody.get("component_templates"); + } + + protected List getAllComposableIndexTemplates() throws IOException { + Response response = makeRequest(client(), "GET", "_index_template", Collections.emptyMap(), null); + assertEquals(RestStatus.OK, restStatus(response)); + Map responseBody = asMap(response); + return (List) responseBody.get("index_templates"); + } + + @SuppressWarnings("unchecked") + protected List> searchCorrelatedFindings(String findingId, String detectorType, long timeWindow, int nearestFindings) throws IOException { + Response response = makeRequest(client(), "GET", "/_plugins/_security_analytics/findings/correlate", Map.of("finding", findingId, "detector_type", detectorType, + "time_window", String.valueOf(timeWindow), "nearby_findings", String.valueOf(nearestFindings)), + null, new BasicHeader("Content-Type", "application/json")); + return (List>) entityAsMap(response).get("findings"); + } + + @Before + void setDebugLogLevel() throws IOException { + StringEntity se = new StringEntity("{\n" + + " \"transient\": {\n" + + " \"logger.org.opensearch.securityanalytics\":\"DEBUG\",\n" + + " \"logger.org.opensearch.jobscheduler\":\"DEBUG\",\n" + + " \"logger.org.opensearch.alerting\":\"DEBUG\"\n" + + " }\n" + + " }"); + + + + makeRequest(client(), "PUT", "_cluster/settings", Collections.emptyMap(), se, new BasicHeader("Content-Type", "application/json")); + } + + protected final List clusterPermissions = List.of( + "cluster:admin/opensearch/securityanalytics/detector/*", + "cluster:admin/opendistro/alerting/alerts/*", + "cluster:admin/opendistro/alerting/findings/*", + "cluster:admin/opensearch/securityanalytics/mapping/*", + "cluster:admin/opensearch/securityanalytics/rule/*" + ); + + protected final List indexPermissions = List.of( + "indices:admin/mappings/get", + "indices:admin/mapping/put", + "indices:data/read/search" + ); + + protected static String TEST_HR_ROLE = "hr_role"; + protected String createTestIndex(String index, String mapping) throws IOException { createTestIndex(index, mapping, Settings.EMPTY); return index; @@ -65,6 +198,34 @@ protected String createTestIndex(String index, String mapping, Settings settings return index; } + protected String createTestIndex(RestClient client, String index, String mapping, Settings settings) throws IOException { + Request request = new Request("PUT", "/" + index); + String entity = "{\"settings\": " + settings.toString(); + if (mapping != null) { + entity = entity + ",\"mappings\" : {" + mapping + "}"; + } + + entity = entity + "}"; + if (!settings.getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)) { + expectSoftDeletesWarning(request, index); + } + + request.setJsonEntity(entity); + client.performRequest(request); + return index; + } + + protected String createDocumentWithNFields(int numOfFields) { + StringBuilder doc = new StringBuilder(); + doc.append("{"); + for(int i = 0; i < numOfFields - 1; i++) { + doc.append("\"id").append(i).append("\": 5,"); + } + doc.append("\"last_field\": 100 }"); + + return doc.toString(); + } + protected Response makeRequest(RestClient client, String method, String endpoint, Map params, HttpEntity entity, Header... headers) throws IOException { Request request = new Request(method, endpoint); @@ -82,6 +243,64 @@ protected Response makeRequest(RestClient client, String method, String endpoint return client.performRequest(request); } + protected Settings getCorrelationDefaultIndexSettings() { + return Settings.builder().put("number_of_shards", 1).put("number_of_replicas", 0).put("index.correlation", true).build(); + } + + protected String createTestIndexWithMappingJson(RestClient client, String index, String mapping, Settings settings) throws IOException { + Request request = new Request("PUT", "/" + index); + String entity = "{\"settings\": " + Strings.toString(XContentType.JSON, settings); + if (mapping != null) { + entity = entity + ",\"mappings\" : " + mapping; + } + + entity = entity + "}"; + if (!settings.getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)) { + expectSoftDeletesWarning(request, index); + } + + request.setJsonEntity(entity); + client.performRequest(request); + return index; + } + + protected void addCorrelationDoc(String index, String docId, List fieldNames, List vectors) throws IOException { + Request request = new Request("POST", "/" + index + "/_doc/" + docId + "?refresh=true"); + + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + for (int i = 0; i < fieldNames.size(); i++) { + builder.field(fieldNames.get(i), vectors.get(i)); + } + builder.endObject(); + + request.setJsonEntity(Strings.toString(builder)); + Response response = client().performRequest(request); + assertEquals(request.getEndpoint() + ": failed", RestStatus.CREATED, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + } + + protected int getDocCount(String index) throws IOException { + Response response = makeRequest(client(), "GET", String.format(Locale.getDefault(), "/%s/_count", index), Collections.emptyMap(), null); + Assert.assertEquals(RestStatus.OK, restStatus(response)); + return Integer.parseInt(entityAsMap(response).get("count").toString()); + } + + protected Response searchCorrelationIndex(String index, CorrelationQueryBuilder correlationQueryBuilder, int resultSize) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject().startObject("query"); + correlationQueryBuilder.doXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject().endObject(); + + Request request = new Request("POST", "/" + index + "/_search"); + + request.addParameter("size", Integer.toString(resultSize)); + request.addParameter("explain", Boolean.toString(true)); + request.addParameter("search_type", "query_then_fetch"); + request.setJsonEntity(Strings.toString(builder)); + + Response response = client().performRequest(request); + Assert.assertEquals("Search failed", RestStatus.OK, restStatus(response)); + return response; + } + protected Boolean doesIndexExist(String index) throws IOException { Response response = makeRequest(client(), "HEAD", String.format(Locale.getDefault(), "/%s", index), Collections.emptyMap(), null); return RestStatus.OK.equals(restStatus(response)); @@ -95,6 +314,18 @@ protected Response executeAlertingMonitor(RestClient client, String monitorId, M return makeRequest(client, "POST", String.format(Locale.getDefault(), "/_plugins/_alerting/monitors/%s/_execute", monitorId), params, null); } + protected Response deleteAlertingMonitorIndex() throws IOException { + return makeRequest(client(), "DELETE", String.format(Locale.getDefault(), "/.opendistro-alerting-config"), new HashMap<>(), null); + } + + protected Response deleteAlertingMonitor(String monitorId) throws IOException { + return deleteAlertingMonitor(client(), monitorId); + } + + protected Response deleteAlertingMonitor(RestClient client, String monitorId) throws IOException { + return makeRequest(client, "DELETE", String.format(Locale.getDefault(), "/_plugins/_alerting/monitors/%s", monitorId), new HashMap<>(), null); + } + protected List executeSearch(String index, String request) throws IOException { return executeSearch(index, request, true); } @@ -169,7 +400,7 @@ protected List getRandomPrePackagedRules() throws IOException { " \"query\": {\n" + " \"bool\": {\n" + " \"must\": [\n" + - " { \"match\": {\"rule.category\": \"windows\"}}\n" + + " { \"match\": {\"rule.category\": \"" + TestHelpers.randomDetectorType().toLowerCase(Locale.ROOT) + "\"}}\n" + " ]\n" + " }\n" + " }\n" + @@ -186,6 +417,18 @@ protected List getRandomPrePackagedRules() throws IOException { return hits.stream().map(hit -> hit.get("_id").toString()).collect(Collectors.toList()); } + protected List createAggregationRules () throws IOException { + return new ArrayList<>(Arrays.asList(createRule(productIndexAvgAggRule()), createRule(sumAggregationTestRule()))); + } + + protected String createRule(String rule) throws IOException { + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", "test_windows"), + new StringEntity(rule), new BasicHeader("Content-Type", "application/json")); + Assert.assertEquals("Create rule failed", RestStatus.CREATED, restStatus(createResponse)); + Map responseBody = asMap(createResponse); + return responseBody.get("_id").toString(); + } + protected List getPrePackagedRules(String ruleCategory) throws IOException { String request = "{\n" + " \"from\": 0\n," + @@ -220,8 +463,7 @@ protected Response indexDoc(String index, String id, String doc) throws IOExcept protected Response indexDoc(RestClient client, String index, String id, String doc, Boolean refresh) throws IOException { StringEntity requestBody = new StringEntity(doc, ContentType.APPLICATION_JSON); Map params = refresh? Map.of("refresh", "true"): Collections.emptyMap(); - Response response = makeRequest(client, "PUT", String.format(Locale.getDefault(), "%s/_doc/%s", index, id), params, requestBody); - + Response response = makeRequest(client, "POST", String.format(Locale.getDefault(), "%s/_doc/%s?op_type=create", index, id), params, requestBody); Assert.assertTrue(String.format(Locale.getDefault(), "Unable to index doc: '%s...' to index: '%s'", doc.substring(0, 15), index), List.of(RestStatus.OK, RestStatus.CREATED).contains(restStatus(response))); return response; } @@ -293,6 +535,20 @@ public static SearchResponse executeSearchRequest(String indexName, String query return SearchResponse.fromXContent(parser); } + public static SearchResponse executeSearchRequest(RestClient client, String indexName, String queryJson) throws IOException { + + Request request = new Request("GET", indexName + "/_search"); + request.setJsonEntity(queryJson); + Response response = client.performRequest(request); + + XContentParser parser = JsonXContent.jsonXContent.createParser( + new NamedXContentRegistry(ClusterModule.getNamedXWriteables()), + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + response.getEntity().getContent() + ); + return SearchResponse.fromXContent(parser); + } + protected HttpEntity toHttpEntity(Detector detector) throws IOException { return new StringEntity(toJsonString(detector), ContentType.APPLICATION_JSON); } @@ -309,6 +565,10 @@ protected HttpEntity toHttpEntity(UpdateIndexMappingsRequest request) throws IOE return new StringEntity(toJsonString(request), ContentType.APPLICATION_JSON); } + protected HttpEntity toHttpEntity(CorrelationRule rule) throws IOException { + return new StringEntity(toJsonString(rule), ContentType.APPLICATION_JSON); + } + protected RestStatus restStatus(Response response) { return RestStatus.fromCode(response.getStatusLine().getStatusCode()); } @@ -337,6 +597,11 @@ private String toJsonString(UpdateIndexMappingsRequest request) throws IOExcepti return IndexUtilsKt.string(shuffleXContent(request.toXContent(builder, ToXContent.EMPTY_PARAMS))); } + protected String toJsonString(CorrelationRule rule) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder(); + return IndexUtilsKt.string(shuffleXContent(rule.toXContent(builder, ToXContent.EMPTY_PARAMS))); + } + private String alertingScheduledJobMappings() { return " \"_meta\" : {\n" + " \"schema_version\": 5\n" + @@ -847,4 +1112,542 @@ private String alertingScheduledJobMappings() { " }\n" + " }"; } + + protected boolean isHttps() { + return Boolean.parseBoolean(System.getProperty("https", "false")); + } + + protected boolean securityEnabled() { + return Boolean.parseBoolean(System.getProperty("https", "false")); + } + + @Override + protected String getProtocol() { + if (isHttps()) { + return "https"; + } else { + return "http"; + } + } + + @Override + protected Settings restAdminSettings() { + + return Settings + .builder() + .put("http.port", 9200) + .put(ConfigConstants.OPENSEARCH_SECURITY_SSL_HTTP_ENABLED, isHttps()) + .put(ConfigConstants.OPENSEARCH_SECURITY_SSL_HTTP_PEMCERT_FILEPATH, "sample.pem") + .put(ConfigConstants.OPENSEARCH_SECURITY_SSL_HTTP_KEYSTORE_FILEPATH, "test-kirk.jks") + .put(ConfigConstants.OPENSEARCH_SECURITY_SSL_HTTP_KEYSTORE_PASSWORD, "changeit") + .put(ConfigConstants.OPENSEARCH_SECURITY_SSL_HTTP_KEYSTORE_KEYPASSWORD, "changeit") + .build(); + } + + + + @Override + protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOException + { + if (securityEnabled()) { + String keystore = settings.get(ConfigConstants.OPENSEARCH_SECURITY_SSL_HTTP_KEYSTORE_FILEPATH); + if (keystore != null) { + // create adminDN (super-admin) client + //log.info("keystore not null"); + URI uri = null; + try { + uri = SecurityAnalyticsRestTestCase.class.getClassLoader().getResource("sample.pem").toURI(); + } + catch(URISyntaxException e) { + return null; + } + Path configPath = PathUtils.get(uri).getParent().toAbsolutePath(); + return new SecureRestClientBuilder(settings, configPath).setSocketTimeout(60000).build(); + } + else { + // create client with passed user + String userName = System.getProperty("user"); + String password = System.getProperty("password"); + return new SecureRestClientBuilder(hosts, isHttps(), userName, password).setSocketTimeout(60000).build(); + } + } + else { + RestClientBuilder builder = RestClient.builder(hosts); + configureClient(builder, settings); + builder.setStrictDeprecationMode(true); + return builder.build(); + } + + } + + protected void createIndexRole(String name, List clusterPermissions, List indexPermission, List indexPatterns) throws IOException { + Response response; + try { + response = client().performRequest(new Request("GET", String.format(Locale.getDefault(), "/_plugins/_security/api/roles/%s", name))); + } catch (ResponseException ex) { + response = ex.getResponse(); + } + // Role already exists + if(response.getStatusLine().getStatusCode() == RestStatus.OK.getStatus()) { + return; + } + + Request request = new Request("PUT", String.format(Locale.getDefault(), "/_plugins/_security/api/roles/%s", name)); + String clusterPermissionsStr = clusterPermissions.stream().map(p -> "\"" + p + "\"").collect(Collectors.joining(",")); + String indexPermissionsStr = indexPermission.stream().map(p -> "\"" + p + "\"").collect(Collectors.joining(",")); + String indexPatternsStr = indexPatterns.stream().map(p -> "\"" + p + "\"").collect(Collectors.joining(",")); + + String entity = "{\n" + + "\"cluster_permissions\": [\n" + + "" + clusterPermissionsStr + "\n" + + "], \n" + + "\"index_permissions\": [\n" + + "{" + + "\"fls\": [], " + + "\"masked_fields\": [], " + + "\"allowed_actions\": [" + indexPermissionsStr + "], " + + "\"index_patterns\": [" + indexPatternsStr + "]" + + "}" + + "], " + + "\"tenant_permissions\": []" + + "}"; + + request.setJsonEntity(entity); + client().performRequest(request); + } + + protected void createCustomRole(String name, String clusterPermissions) throws IOException { + Request request = new Request("PUT", String.format(Locale.getDefault(), "/_plugins/_security/api/roles/%s", name)); + String entity = "{\n" + + "\"cluster_permissions\": [\n" + + "\"" + clusterPermissions + "\"\n" + + "]\n" + + "}"; + request.setJsonEntity(entity); + client().performRequest(request); + } + + public void createUser(String name, String passwd, String[] backendRoles) throws IOException { + Request request = new Request("PUT", String.format(Locale.getDefault(), "/_plugins/_security/api/internalusers/%s", name)); + String broles = String.join(",", backendRoles); + //String roles = String.join(",", customRoles); + String entity = " {\n" + + "\"password\": \"" + passwd + "\",\n" + + "\"backend_roles\": [\"" + broles + "\"],\n" + + "\"attributes\": {\n" + + "}} "; + request.setJsonEntity(entity); + client().performRequest(request); + } + + protected void createUserRolesMapping(String role, String[] users) throws IOException { + Request request = new Request("PUT", String.format(Locale.getDefault(), "/_plugins/_security/api/rolesmapping/%s", role)); + String usersArr= String.join(",", users); + String entity = "{\n" + + " \"backend_roles\" : [ ],\n" + + " \"hosts\" : [ ],\n" + + "\"users\": [\"" + usersArr + "\"]\n" + + "}"; + request.setJsonEntity(entity); + client().performRequest(request); + } + + protected void enableOrDisableFilterBy(String trueOrFalse) throws IOException { + Request request = new Request("PUT", "_cluster/settings"); + String entity = "{\"persistent\":{\"plugins.security_analytics.filter_by_backend_roles\" : " + trueOrFalse + "}}"; + request.setJsonEntity(entity); + client().performRequest(request); + } + + protected void createUserWithDataAndCustomRole(String userName, String userPasswd, String roleName, String[] backendRoles, String clusterPermissions ) throws IOException { + String[] users = {userName}; + createUser(userName, userPasswd, backendRoles); + createCustomRole(roleName, clusterPermissions); + createUserRolesMapping(roleName, users); + } + + protected void createUserWithDataAndCustomRole(String userName, String userPasswd, String roleName, String[] backendRoles, List clusterPermissions, List indexPermissions, List indexPatterns) throws IOException { + String[] users = {userName}; + createUser(userName, userPasswd, backendRoles); + createIndexRole(roleName, clusterPermissions, indexPermissions, indexPatterns); + createUserRolesMapping(roleName, users); + } + + protected void createUserWithData(String userName, String userPasswd, String roleName, String[] backendRoles ) throws IOException { + String[] users = {userName}; + createUser(userName, userPasswd, backendRoles); + createUserRolesMapping(roleName, users); + } + + public void createUserWithTestData(String user, String index, String role, String [] backendRoles, List indexPermissions) throws IOException{ + String[] users = {user}; + createUser(user, user, backendRoles); + createTestIndex(client(), index, windowsIndexMapping(), Settings.EMPTY); + createIndexRole(role, Collections.emptyList(), indexPermissions, List.of(index)); + createUserRolesMapping(role, users); + } + + protected void deleteUser(String name) throws IOException { + Request request = new Request("DELETE", String.format(Locale.getDefault(), "/_plugins/_security/api/internalusers/%s", name)); + client().performRequest(request); + } + + protected void tryDeletingRole(String name) throws IOException{ + Response response; + try { + response = client().performRequest(new Request("GET", String.format(Locale.getDefault(), "/_plugins/_security/api/roles/%s", name))); + } catch (ResponseException ex) { + response = ex.getResponse(); + } + // Role already exists + if(response.getStatusLine().getStatusCode() == RestStatus.OK.getStatus()) { + Request request = new Request("DELETE", String.format(Locale.getDefault(), "/_plugins/_security/api/roles/%s", name)); + client().performRequest(request); + } + } + + @Override + protected boolean preserveIndicesUponCompletion() { + return true; + } + + boolean preserveODFEIndicesAfterTest() { + return false; + } + + @After + protected void wipeAllODFEIndices() throws IOException { + if (preserveODFEIndicesAfterTest()) return; + + Response response = client().performRequest(new Request("GET", "/_cat/indices?format=json&expand_wildcards=all")); + + XContentType xContentType = XContentType.fromMediaType(response.getEntity().getContentType().getValue()); + XContentParser parser = xContentType.xContent().createParser( + NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + response.getEntity().getContent() + ); + + + for (Object index : parser.list()) { + Map jsonObject = (Map) index; + + String indexName = jsonObject.get("index").toString(); + // .opendistro_security isn't allowed to delete from cluster + if (!".opendistro_security".equals(indexName)) { + Request request = new Request("DELETE", String.format(Locale.getDefault(), "/%s", indexName)); + // TODO: remove PERMISSIVE option after moving system index access to REST API call + RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder(); + options.setWarningsHandler(WarningsHandler.PERMISSIVE); + request.setOptions(options.build()); + adminClient().performRequest(request); + } + } + } + + + + public List getAlertIndices(String detectorType) throws IOException { + Response response = client().performRequest(new Request("GET", "/_cat/indices/" + DetectorMonitorConfig.getAllAlertsIndicesPattern(detectorType) + "?format=json")); + XContentParser xcp = createParser(XContentType.JSON.xContent(), response.getEntity().getContent()); + List responseList = xcp.list(); + List indices = new ArrayList<>(); + for (Object o : responseList) { + if (o instanceof Map) { + ((Map) o).forEach((BiConsumer) + (o1, o2) -> { + if (o1.equals("index")) { + indices.add((String) o2); + } + }); + } + } + return indices; + } + + public List getQueryIndices(String detectorType) throws IOException { + Response response = client().performRequest(new Request("GET", "/_cat/indices/" + DetectorMonitorConfig.getRuleIndex(detectorType) + "*?format=json")); + XContentParser xcp = createParser(XContentType.JSON.xContent(), response.getEntity().getContent()); + List responseList = xcp.list(); + List indices = new ArrayList<>(); + for (Object o : responseList) { + if (o instanceof Map) { + ((Map) o).forEach((BiConsumer) + (o1, o2) -> { + if (o1.equals("index")) { + indices.add((String) o2); + } + }); + } + } + return indices; + } + + + public List getFindingIndices(String detectorType) throws IOException { + Response response = client().performRequest(new Request("GET", "/_cat/indices/" + DetectorMonitorConfig.getAllFindingsIndicesPattern(detectorType) + "?format=json")); + XContentParser xcp = createParser(XContentType.JSON.xContent(), response.getEntity().getContent()); + List responseList = xcp.list(); + List indices = new ArrayList<>(); + for (Object o : responseList) { + if (o instanceof Map) { + ((Map) o).forEach((BiConsumer) + (o1, o2) -> { + if (o1.equals("index")) { + indices.add((String) o2); + } + }); + } + } + return indices; + } + + public void updateClusterSetting(String setting, String value) throws IOException { + String settingJson = "{\n" + + " \"persistent\" : {" + + " \"%s\": \"%s\"" + + " }" + + "}"; + settingJson = String.format(settingJson, setting, value); + makeRequest(client(), "PUT", "_cluster/settings", Collections.emptyMap(), new StringEntity(settingJson, ContentType.APPLICATION_JSON), new BasicHeader("Content-Type", "application/json")); + } + + public void acknowledgeAlert(String alertId, String detectorId) throws IOException { + String body = String.format(Locale.getDefault(), "{\"alerts\":[\"%s\"]}", alertId); + Request post = new Request("POST", String.format( + Locale.getDefault(), + "%s/%s/_acknowledge/alerts", + SecurityAnalyticsPlugin.DETECTOR_BASE_URI, + detectorId)); + post.setJsonEntity(body); + Response ackAlertsResponse = client().performRequest(post); + assertNotNull(ackAlertsResponse); + Map ackAlertsResponseMap = entityAsMap(ackAlertsResponse); + assertTrue(((ArrayList) ackAlertsResponseMap.get("missing")).isEmpty()); + assertTrue(((ArrayList) ackAlertsResponseMap.get("failed")).isEmpty()); + assertEquals(((ArrayList) ackAlertsResponseMap.get("acknowledged")).size(), 1); + } + + protected void createNetflowLogIndex(String indexName) throws IOException { + String indexMapping = + " \"properties\": {" + + " \"netflow.source_ipv4_address\": {" + + " \"type\": \"ip\"" + + " }," + + " \"netflow.destination_transport_port\": {" + + " \"type\": \"integer\"" + + " }," + + " \"netflow.destination_ipv4_address\": {" + + " \"type\": \"ip\"" + + " }," + + " \"netflow.source_transport_port\": {" + + " \"type\": \"integer\"" + + " }" + + " }"; + + createIndex(indexName, Settings.EMPTY, indexMapping); + + // Insert sample doc + String sampleDoc = "{" + + " \"netflow.source_ipv4_address\":\"10.50.221.10\"," + + " \"netflow.destination_transport_port\":1234," + + " \"netflow.destination_ipv4_address\":\"10.53.111.14\"," + + " \"netflow.source_transport_port\":4444" + + "}"; + + // Index doc + Request indexRequest = new Request("POST", indexName + "/_doc?refresh=wait_for"); + indexRequest.setJsonEntity(sampleDoc); + Response response = client().performRequest(indexRequest); + assertEquals(HttpStatus.SC_CREATED, response.getStatusLine().getStatusCode()); + // Refresh everything + response = client().performRequest(new Request("POST", "_refresh")); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + } + + + private Map getIndexAPI(String index) throws IOException { + Response resp = makeRequest(client(), "GET", "/" + index + "?expand_wildcards=all", Collections.emptyMap(), null); + return asMap(resp); + } + + private Map getIndexSettingsAPI(String index) throws IOException { + Response resp = makeRequest(client(), "GET", "/" + index + "/_settings?expand_wildcards=all", Collections.emptyMap(), null); + Map respMap = asMap(resp); + return respMap; + } + + protected void doRollover(String datastreamName) throws IOException { + Response response = makeRequest(client(), "POST", datastreamName + "/_rollover", Collections.emptyMap(), null); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + } + + protected void createComponentTemplateWithMappings(String componentTemplateName, String mappings) throws IOException { + + String body = "{\n" + + " \"template\" : {" + + " \"mappings\": {%s}" + + " }" + + "}"; + body = String.format(body, mappings); + Response response = makeRequest( + client(), + "PUT", + "_component_template/" + componentTemplateName, + Collections.emptyMap(), + new StringEntity(body, ContentType.APPLICATION_JSON), + new BasicHeader("Content-Type", "application/json") + ); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + } + + protected void createComposableIndexTemplate(String templateName, List indexPatterns, String componentTemplateName, String mappings, boolean isDatastream) throws IOException { + createComposableIndexTemplate(templateName, indexPatterns, componentTemplateName, mappings, isDatastream, 0); + } + + protected void createComposableIndexTemplate(String templateName, List indexPatterns, String componentTemplateName, String mappings, boolean isDatastream, int priority) throws IOException { + + String body = "{\n" + + (isDatastream ? "\"data_stream\": { }," : "") + + " \"index_patterns\": [" + + indexPatterns.stream().collect( + Collectors.joining(",", "\"", "\"")) + + "]," + + (componentTemplateName == null ? ("\"template\": {\"mappings\": {" + mappings + "}},") : "") + + (componentTemplateName != null ? ("\"composed_of\": [\"" + componentTemplateName + "\"],") : "") + + "\"priority\":" + priority + + "}"; + Response response = makeRequest( + client(), + "PUT", + "_index_template/" + templateName, + Collections.emptyMap(), + new StringEntity(body, ContentType.APPLICATION_JSON), + new BasicHeader("Content-Type", "application/json") + ); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + } + + protected Map getIndexMappingsAPIFlat(String indexName) throws IOException { + Request request = new Request("GET", indexName + "/_mapping"); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + Map respMap = (Map) responseAsMap(response).values().iterator().next(); + + MappingsTraverser mappingsTraverser = new MappingsTraverser((Map) respMap.get("mappings"), Set.of()); + Map flatMappings = mappingsTraverser.traverseAndCopyAsFlat(); + return (Map) flatMappings.get("properties"); + } + + protected Map getIndexMappingsAPI(String indexName) throws IOException { + Request request = new Request("GET", indexName + "/_mapping"); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + Map respMap = (Map) responseAsMap(response).values().iterator().next(); + return (Map) respMap.get("mappings"); + } + + protected Map getIndexMappingsSAFlat(String indexName) throws IOException { + Request request = new Request("GET", MAPPER_BASE_URI + "?index_name=" + indexName); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + Map respMap = (Map) responseAsMap(response).values().iterator().next(); + + MappingsTraverser mappingsTraverser = new MappingsTraverser((Map) respMap.get("mappings"), Set.of()); + Map flatMappings = mappingsTraverser.traverseAndCopyAsFlat(); + return (Map) flatMappings.get("properties"); + } + + + + protected void createMappingsAPI(String indexName, String topicName) throws IOException { + Request request = new Request("POST", MAPPER_BASE_URI); + // both req params and req body are supported + request.setJsonEntity( + "{ \"index_name\":\"" + indexName + "\"," + + " \"rule_topic\":\"" + topicName + "\", " + + " \"partial\":true" + + "}" + ); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + } + + protected String getDatastreamWriteIndex(String datastream) throws IOException { + Response response = makeRequest(client(), "GET", "_data_stream/" + datastream, Collections.emptyMap(), null); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + Map respAsMap = responseAsMap(response); + if (respAsMap.containsKey("data_streams")) { + respAsMap = ((ArrayList) respAsMap.get("data_streams")).get(0); + List> indices = (List>) respAsMap.get("indices"); + Map index = indices.get(indices.size() - 1); + return (String) index.get("index_name"); + } else { + respAsMap = (Map) respAsMap.get(datastream); + } + String[] indices = (String[]) respAsMap.get("indices"); + return indices[indices.length - 1]; + } + + protected void createDatastreamAPI(String datastreamName) throws IOException { + //PUT _data_stream/my-data-stream + Request request = new Request("PUT", "_data_stream/" + datastreamName); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + } + + + protected void deleteDatastreamAPI(String datastreamName) throws IOException { + Request request = new Request("DELETE", "_data_stream/" + datastreamName); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + } + + protected void createSampleDatastream(String datastreamName, String mappings) throws IOException { + createSampleDatastream(datastreamName, mappings, true); + } + + protected void createSampleDatastream(String datastreamName, String mappings, boolean useComponentTemplate) throws IOException { + + String indexPattern = datastreamName + "*"; + + String componentTemplateMappings = "\"properties\": {" + + " \"netflow.destination_transport_port\":{ \"type\": \"long\" }," + + " \"netflow.destination_ipv4_address\":{ \"type\": \"ip\" }" + + "}"; + + if (mappings != null) { + componentTemplateMappings = mappings; + } + + if (useComponentTemplate) { + // Setup index_template + createComponentTemplateWithMappings( + "my_ds_component_template-" + datastreamName, + componentTemplateMappings + ); + } + createComposableIndexTemplate( + "my_index_template_ds-" + datastreamName, + List.of(indexPattern), + useComponentTemplate ? "my_ds_component_template-" + datastreamName : null, + mappings, + true + ); + + createDatastreamAPI(datastreamName); + } + + + protected void restoreAlertsFindingsIMSettings() throws IOException { + updateClusterSetting(ALERT_HISTORY_ROLLOVER_PERIOD.getKey(), "720m"); + updateClusterSetting(ALERT_HISTORY_MAX_DOCS.getKey(), "100000"); + updateClusterSetting(ALERT_HISTORY_INDEX_MAX_AGE.getKey(), "60d"); + updateClusterSetting(ALERT_HISTORY_RETENTION_PERIOD.getKey(), "60d"); + + updateClusterSetting(FINDING_HISTORY_ROLLOVER_PERIOD.getKey(), "720m"); + updateClusterSetting(FINDING_HISTORY_MAX_DOCS.getKey(), "100000"); + updateClusterSetting(FINDING_HISTORY_INDEX_MAX_AGE.getKey(), "60d"); + updateClusterSetting(FINDING_HISTORY_RETENTION_PERIOD.getKey(), "60d"); + + } } \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java index d94560d17..00e87e8d6 100644 --- a/src/test/java/org/opensearch/securityanalytics/TestHelpers.java +++ b/src/test/java/org/opensearch/securityanalytics/TestHelpers.java @@ -8,19 +8,21 @@ import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.commons.alerting.model.IntervalSchedule; import org.opensearch.commons.alerting.model.Schedule; import org.opensearch.commons.alerting.model.action.Action; import org.opensearch.commons.alerting.model.action.Throttle; import org.opensearch.commons.authuser.User; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.script.Script; import org.opensearch.script.ScriptType; +import org.opensearch.securityanalytics.model.CorrelationQuery; +import org.opensearch.securityanalytics.model.CorrelationRule; import org.opensearch.securityanalytics.model.Detector; import org.opensearch.securityanalytics.model.DetectorInput; import org.opensearch.securityanalytics.model.DetectorRule; @@ -37,7 +39,6 @@ import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.function.Function; import java.util.stream.Collectors; import static org.opensearch.test.OpenSearchTestCase.randomInt; @@ -57,6 +58,9 @@ public static Detector randomDetector(List rules) { public static Detector randomDetectorWithInputs(List inputs) { return randomDetector(null, null, null, inputs, List.of(), null, null, null, null); } + public static Detector randomDetectorWithInputs(List inputs, Detector.DetectorType detectorType) { + return randomDetector(null, detectorType, null, inputs, List.of(), null, null, null, null); + } public static Detector randomDetectorWithTriggers(List triggers) { return randomDetector(null, null, null, List.of(), triggers, null, null, null, null); } @@ -65,7 +69,11 @@ public static Detector randomDetectorWithTriggers(List rules, List rules, List triggers, List inputIndices) { + DetectorInput input = new DetectorInput("windows detector for security analytics", inputIndices, Collections.emptyList(), + rules.stream().map(DetectorRule::new).collect(Collectors.toList())); + return randomDetector(null, null, null, List.of(input), triggers, null, null, null, null); + } public static Detector randomDetectorWithInputsAndTriggers(List inputs, List triggers) { return randomDetector(null, null, null, inputs, triggers, null, null, null, null); } @@ -74,6 +82,10 @@ public static Detector randomDetectorWithTriggers(List rules, List inputs, List triggers, Detector.DetectorType detectorType) { + return randomDetector(null, detectorType, null, inputs, triggers, null, null, null, null); + } + public static Detector randomDetector(String name, Detector.DetectorType detectorType, User user, @@ -118,10 +130,10 @@ public static Detector randomDetector(String name, if (triggers.size() == 0) { triggers = new ArrayList<>(); - DetectorTrigger trigger = new DetectorTrigger(null, "windows-trigger", "1", List.of("windows"), List.of("QuarksPwDump Clearing Access History"), List.of("high"), List.of("T0008"), List.of()); + DetectorTrigger trigger = new DetectorTrigger(null, "windows-trigger", "1", List.of(randomDetectorType()), List.of("QuarksPwDump Clearing Access History"), List.of("high"), List.of("T0008"), List.of()); triggers.add(trigger); } - return new Detector(null, null, name, enabled, schedule, lastUpdateTime, enabledTime, detectorType, user, inputs, triggers, Collections.singletonList(""), "", "", "", "", "", ""); + return new Detector(null, null, name, enabled, schedule, lastUpdateTime, enabledTime, detectorType, user, inputs, triggers, Collections.singletonList(""), "", "", "", "", "", "", Collections.emptyMap()); } public static Detector randomDetectorWithNoUser() { @@ -133,7 +145,16 @@ public static Detector randomDetectorWithNoUser() { Instant enabledTime = enabled ? Instant.now().truncatedTo(ChronoUnit.MILLIS) : null; Instant lastUpdateTime = Instant.now().truncatedTo(ChronoUnit.MILLIS); - return new Detector(null, null, name, enabled, schedule, lastUpdateTime, enabledTime, detectorType, null, inputs, Collections.emptyList(),Collections.singletonList(""), "", "", "", "", "", ""); + return new Detector(null, null, name, enabled, schedule, lastUpdateTime, enabledTime, detectorType, null, inputs, Collections.emptyList(),Collections.singletonList(""), "", "", "", "", "", "", Collections.emptyMap()); + } + + public static CorrelationRule randomCorrelationRule(String name) { + name = name.isEmpty()? ">": name; + return new CorrelationRule(CorrelationRule.NO_ID, CorrelationRule.NO_VERSION, name, + List.of( + new CorrelationQuery("vpc_flow1", "dstaddr:192.168.1.*", "network"), + new CorrelationQuery("ad_logs1", "azure.platformlogs.result_type:50126", "ad_ldap") + )); } public static String randomRule() { @@ -165,6 +186,294 @@ public static String randomRule() { "level: high"; } + + + public static String randomNullRule() { + return "title: null field\n" + + "id: 5f92fff9-82e2-48eb-8fc1-8b133556a551\n" + + "description: Detects remote RPC calls to possibly abuse remote encryption service via MS-EFSR\n" + + "references:\n" + + " - https://attack.mitre.org/tactics/TA0008/\n" + + " - https://msrc.microsoft.com/update-guide/vulnerability/CVE-2021-36942\n" + + " - https://github.com/jsecurity101/MSRPC-to-ATTACK/blob/main/documents/MS-EFSR.md\n" + + " - https://github.com/zeronetworks/rpcfirewall\n" + + " - https://zeronetworks.com/blog/stopping_lateral_movement_via_the_rpc_firewall/\n" + + "tags:\n" + + " - attack.defense_evasion\n" + + "status: experimental\n" + + "author: Sagie Dulce, Dekel Paz\n" + + "date: 2022/01/01\n" + + "modified: 2022/01/01\n" + + "logsource:\n" + + " product: rpc_firewall\n" + + " category: application\n" + + " definition: 'Requirements: install and apply the RPC Firew all to all processes with \"audit:true action:block uuid:df1941c5-fe89-4e79-bf10-463657acf44d or c681d488-d850-11d0-8c52-00c04fd90f7e'\n" + + "detection:\n" + + " selection:\n" + + " EventID: 22\n" + + " RecordNumber: null\n" + + " condition: selection\n" + + "falsepositives:\n" + + " - Legitimate usage of remote file encryption\n" + + "level: high"; + } + + public static String randomRuleForMappingView(String field) { + return "title: Remote Encrypting File System Abuse\n" + + "id: 5f92fff9-82e2-48eb-8fc1-8b133556a551\n" + + "description: Detects remote RPC calls to possibly abuse remote encryption service via MS-EFSR\n" + + "references:\n" + + " - https://attack.mitre.org/tactics/TA0008/\n" + + " - https://msrc.microsoft.com/update-guide/vulnerability/CVE-2021-36942\n" + + " - https://github.com/jsecurity101/MSRPC-to-ATTACK/blob/main/documents/MS-EFSR.md\n" + + " - https://github.com/zeronetworks/rpcfirewall\n" + + " - https://zeronetworks.com/blog/stopping_lateral_movement_via_the_rpc_firewall/\n" + + "tags:\n" + + " - attack.defense_evasion\n" + + "status: experimental\n" + + "author: Sagie Dulce, Dekel Paz\n" + + "date: 2022/01/01\n" + + "modified: 2022/01/01\n" + + "logsource:\n" + + " product: rpc_firewall\n" + + " category: application\n" + + " definition: 'Requirements: install and apply the RPC Firewall to all processes with \"audit:true action:block uuid:df1941c5-fe89-4e79-bf10-463657acf44d or c681d488-d850-11d0-8c52-00c04fd90f7e'\n" + + "detection:\n" + + " selection:\n" + + " "+ field + ": 'ACL'\n" + + " condition: selection\n" + + "falsepositives:\n" + + " - Legitimate usage of remote file encryption\n" + + "level: high"; + } + + public static String randomRuleForCustomLogType() { + return "title: Remote Encrypting File System Abuse\n" + + "id: 5f92fff9-82e2-48eb-8fc1-8b133556a551\n" + + "description: Detects remote RPC calls to possibly abuse remote encryption service via MS-EFSR\n" + + "references:\n" + + " - https://attack.mitre.org/tactics/TA0008/\n" + + " - https://msrc.microsoft.com/update-guide/vulnerability/CVE-2021-36942\n" + + " - https://github.com/jsecurity101/MSRPC-to-ATTACK/blob/main/documents/MS-EFSR.md\n" + + " - https://github.com/zeronetworks/rpcfirewall\n" + + " - https://zeronetworks.com/blog/stopping_lateral_movement_via_the_rpc_firewall/\n" + + "tags:\n" + + " - attack.defense_evasion\n" + + "status: experimental\n" + + "author: Sagie Dulce, Dekel Paz\n" + + "date: 2022/01/01\n" + + "modified: 2022/01/01\n" + + "logsource:\n" + + " product: rpc_firewall\n" + + " category: application\n" + + " definition: 'Requirements: install and apply the RPC Firewall to all processes with \"audit:true action:block uuid:df1941c5-fe89-4e79-bf10-463657acf44d or c681d488-d850-11d0-8c52-00c04fd90f7e'\n" + + "detection:\n" + + " selection:\n" + + " EventID: 22\n" + + " Author: 'Hello'\n" + + " condition: selection\n" + + "falsepositives:\n" + + " - Legitimate usage of remote file encryption\n" + + "level: high"; + } + + public static String randomRuleWithAlias() { + return "title: Remote Encrypting File System Abuse\n" + + "id: 5f92fff9-82e2-48eb-8fc1-8b133556a551\n" + + "description: Detects remote RPC calls to possibly abuse remote encryption service via MS-EFSR\n" + + "references:\n" + + " - https://attack.mitre.org/tactics/TA0008/\n" + + " - https://msrc.microsoft.com/update-guide/vulnerability/CVE-2021-36942\n" + + " - https://github.com/jsecurity101/MSRPC-to-ATTACK/blob/main/documents/MS-EFSR.md\n" + + " - https://github.com/zeronetworks/rpcfirewall\n" + + " - https://zeronetworks.com/blog/stopping_lateral_movement_via_the_rpc_firewall/\n" + + "tags:\n" + + " - attack.defense_evasion\n" + + "status: experimental\n" + + "author: Sagie Dulce, Dekel Paz\n" + + "date: 2022/01/01\n" + + "modified: 2022/01/01\n" + + "logsource:\n" + + " product: rpc_firewall\n" + + " category: application\n" + + " definition: 'Requirements: install and apply the RPC Firewall to all processes with \"audit:true action:block uuid:df1941c5-fe89-4e79-bf10-463657acf44d or c681d488-d850-11d0-8c52-00c04fd90f7e'\n" + + "detection:\n" + + " selection:\n" + + " event_uid: 22\n" + + " condition: selection\n" + + "falsepositives:\n" + + " - Legitimate usage of remote file encryption\n" + + "level: high"; + } + + public static String randomRuleWithKeywords() { + return "title: Remote Encrypting File System Abuse\n" + + "id: 5f92fff9-82e2-48eb-8fc1-8b133556a551\n" + + "description: Detects remote RPC calls to possibly abuse remote encryption service via MS-EFSR\n" + + "references:\n" + + " - https://attack.mitre.org/tactics/TA0008/\n" + + " - https://msrc.microsoft.com/update-guide/vulnerability/CVE-2021-36942\n" + + " - https://github.com/jsecurity101/MSRPC-to-ATTACK/blob/main/documents/MS-EFSR.md\n" + + " - https://github.com/zeronetworks/rpcfirewall\n" + + " - https://zeronetworks.com/blog/stopping_lateral_movement_via_the_rpc_firewall/\n" + + "tags:\n" + + " - attack.defense_evasion\n" + + "status: experimental\n" + + "author: Sagie Dulce, Dekel Paz\n" + + "date: 2022/01/01\n" + + "modified: 2022/01/01\n" + + "logsource:\n" + + " product: rpc_firewall\n" + + " category: application\n" + + " definition: 'Requirements: install and apply the RPC Firewall to all processes with \"audit:true action:block uuid:df1941c5-fe89-4e79-bf10-463657acf44d or c681d488-d850-11d0-8c52-00c04fd90f7e'\n" + + "detection:\n" + + " selection:\n" + + " EventID: 21\n" + + " keywords:\n" + + " - 1996\n" + + " - EC2AMAZ*\n" + + " condition: selection or keywords\n" + + "falsepositives:\n" + + " - Legitimate usage of remote file encryption\n" + + "level: high"; + } + + public static String randomRuleWithStringKeywords() { + return "title: Remote Encrypting File System Abuse\n" + + "id: 5f92fff9-82e2-48eb-8fc1-8b133556a551\n" + + "description: Detects remote RPC calls to possibly abuse remote encryption service via MS-EFSR\n" + + "references:\n" + + " - https://attack.mitre.org/tactics/TA0008/\n" + + " - https://msrc.microsoft.com/update-guide/vulnerability/CVE-2021-36942\n" + + " - https://github.com/jsecurity101/MSRPC-to-ATTACK/blob/main/documents/MS-EFSR.md\n" + + " - https://github.com/zeronetworks/rpcfirewall\n" + + " - https://zeronetworks.com/blog/stopping_lateral_movement_via_the_rpc_firewall/\n" + + "tags:\n" + + " - attack.defense_evasion\n" + + "status: experimental\n" + + "author: Sagie Dulce, Dekel Paz\n" + + "date: 2022/01/01\n" + + "modified: 2022/01/01\n" + + "logsource:\n" + + " product: rpc_firewall\n" + + " category: application\n" + + " definition: 'Requirements: install and apply the RPC Firewall to all processes with \"audit:true action:block uuid:df1941c5-fe89-4e79-bf10-463657acf44d or c681d488-d850-11d0-8c52-00c04fd90f7e'\n" + + "detection:\n" + + " selection:\n" + + " EventID: 21\n" + + " keywords:\n" + + " - \"INFO\"\n" + + " condition: selection or keywords\n" + + "falsepositives:\n" + + " - Legitimate usage of remote file encryption\n" + + "level: high"; + } + + public static String randomRuleWithDateKeywords() { + return "title: Remote Encrypting File System Abuse\n" + + "id: 5f92fff9-82e2-48eb-8fc1-8b133556a551\n" + + "description: Detects remote RPC calls to possibly abuse remote encryption service via MS-EFSR\n" + + "references:\n" + + " - https://attack.mitre.org/tactics/TA0008/\n" + + " - https://msrc.microsoft.com/update-guide/vulnerability/CVE-2021-36942\n" + + " - https://github.com/jsecurity101/MSRPC-to-ATTACK/blob/main/documents/MS-EFSR.md\n" + + " - https://github.com/zeronetworks/rpcfirewall\n" + + " - https://zeronetworks.com/blog/stopping_lateral_movement_via_the_rpc_firewall/\n" + + "tags:\n" + + " - attack.defense_evasion\n" + + "status: experimental\n" + + "author: Sagie Dulce, Dekel Paz\n" + + "date: 2022/01/01\n" + + "modified: 2022/01/01\n" + + "logsource:\n" + + " product: rpc_firewall\n" + + " category: application\n" + + " definition: 'Requirements: install and apply the RPC Firewall to all processes with \"audit:true action:block uuid:df1941c5-fe89-4e79-bf10-463657acf44d or c681d488-d850-11d0-8c52-00c04fd90f7e'\n" + + "detection:\n" + + " selection:\n" + + " EventID: 21\n" + + " keywords:\n" + + " - \"2020-02-04T14:59:39.343541+00:00\"\n" + + " condition: selection or keywords\n" + + "falsepositives:\n" + + " - Legitimate usage of remote file encryption\n" + + "level: high"; + } + + public static String countAggregationTestRule() { + return " title: Test\n" + + " id: 39f919f3-980b-4e6f-a975-8af7e507ef2b\n" + + " status: test\n" + + " level: critical\n" + + " description: Detects QuarksPwDump clearing access history in hive\n" + + " author: Florian Roth\n" + + " date: 2017/05/15\n" + + " logsource:\n" + + " category: test_category\n" + + " product: test_product\n" + + " detection:\n" + + " sel:\n" + + " fieldA: valueA\n" + + " fieldB: valueB\n" + + " fieldC: valueC\n" + + " condition: sel | count(*) > 1"; + } + + public static String sumAggregationTestRule() { + return " title: Test\n" + + " id: 39f919f3-980b-4e6f-a975-8af7e507ef2b\n" + + " status: test\n" + + " level: critical\n" + + " description: Detects QuarksPwDump clearing access history in hive\n" + + " author: Florian Roth\n" + + " date: 2017/05/15\n" + + " logsource:\n" + + " category: test_category\n" + + " product: test_product\n" + + " detection:\n" + + " sel:\n" + + " fieldA: 123\n" + + " fieldB: 111\n" + + " fieldC: valueC\n" + + " condition: sel | sum(fieldA) by fieldB > 110"; + } + + public static String productIndexMaxAggRule() { + return " title: Test\n" + + " id: 5f92fff9-82e3-48eb-8fc1-8b133556a551\n" + + " status: test\n" + + " level: critical\n" + + " description: Detects QuarksPwDump clearing access history in hive\n" + + " author: Florian Roth\n" + + " date: 2017/05/15\n" + + " logsource:\n" + + " category: test_category\n" + + " product: test_product\n" + + " detection:\n" + + " sel:\n" + + " fieldA: 123\n" + + " fieldB: 111\n" + + " fieldC: valueC\n" + + " condition: sel | max(fieldA) by fieldB > 110"; + } + + public static String randomProductDocument(){ + return "{\n" + + " \"fieldA\": 123,\n" + + " \"mappedB\": 111,\n" + + " \"fieldC\": \"valueC\"\n" + + "}\n"; + } + + public static String randomProductDocumentWithTime(long time){ + return "{\n" + + " \"fieldA\": 123,\n" + + " \"mappedB\": 111,\n" + + " \"time\": " + (time) + ",\n" + + " \"fieldC\": \"valueC\"\n" + + "}\n"; + } + public static String randomEditedRule() { return "title: Remote Encrypting File System Abuse\n" + "id: 5f92fff9-82e2-48eb-8fc1-8b133556a551\n" + @@ -243,7 +552,7 @@ public static User randomUserEmpty() { } public static String randomDetectorType() { - return "windows"; + return "test_windows"; } public static DetectorInput randomDetectorInput() { @@ -359,51 +668,149 @@ public static String netFlowMappings() { " }"; } - public static String windowsIndexMapping() { - return "\"properties\": {\n" + - " \"AccessList\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"AccessMask\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"Accesses\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"AccountName\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"AccountType\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"Action\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"Address\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"AllowedToDelegateTo\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"Application\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"ApplicationPath\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"AttributeLDAPDisplayName\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"AttributeValue\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"AuditPolicyChanges\": {\n" + + public static String productIndexMapping(){ + return "\"properties\":{\n" + + " \"fieldA\":{\n" + + " \"type\":\"long\"\n" + + " },\n" + + " \"mappedB\":{\n" + + " \"type\":\"long\"\n" + + " },\n" + + " \"time\":{\n" + + " \"type\":\"date\"\n" + + " },\n" + + " \"fieldC\":{\n" + + " \"type\":\"keyword\"\n" + + " }\n" + + "}\n" + + "}"; + } + + public static String productIndexAvgAggRule(){ + return " title: Test\n" + + " id: 39f918f3-981b-4e6f-a975-8af7e507ef2b\n" + + " status: test\n" + + " level: critical\n" + + " description: Detects QuarksPwDump clearing access history in hive\n" + + " author: Florian Roth\n" + + " date: 2017/05/15\n" + + " logsource:\n" + + " category: test_category\n" + + " product: test_product\n" + + " detection:\n" + + " sel:\n" + + " fieldA: 123\n" + + " fieldB: 111\n" + + " fieldC: valueC\n" + + " condition: sel | avg(fieldA) by fieldC > 110"; + } + + public static String randomAggregationRule(String aggFunction, String signAndValue) { + String rule = "title: Remote Encrypting File System Abuse\n" + + "id: 5f92fff9-82e2-48eb-8fc1-8b133556a551\n" + + "description: Detects remote RPC calls to possibly abuse remote encryption service via MS-EFSR\n" + + "references:\n" + + " - https://attack.mitre.org/tactics/TA0008/\n" + + " - https://msrc.microsoft.com/update-guide/vulnerability/CVE-2021-36942\n" + + " - https://github.com/jsecurity101/MSRPC-to-ATTACK/blob/main/documents/MS-EFSR.md\n" + + " - https://github.com/zeronetworks/rpcfirewall\n" + + " - https://zeronetworks.com/blog/stopping_lateral_movement_via_the_rpc_firewall/\n" + + "tags:\n" + + " - attack.defense_evasion\n" + + "status: experimental\n" + + "author: Sagie Dulce, Dekel Paz\n" + + "date: 2022/01/01\n" + + "modified: 2022/01/01\n" + + "logsource:\n" + + " product: rpc_firewall\n" + + " category: application\n" + + " definition: 'Requirements: install and apply the RPC Firewall to all processes with \"audit:true action:block uuid:df1941c5-fe89-4e79-bf10-463657acf44d or c681d488-d850-11d0-8c52-00c04fd90f7e'\n" + + "detection:\n" + + " sel:\n" + + " Opcode: Info\n" + + " condition: sel | %s(SeverityValue) by Version %s\n" + + "falsepositives:\n" + + " - Legitimate usage of remote file encryption\n" + + "level: high"; + return String.format(Locale.ROOT, rule, aggFunction, signAndValue); + } + + public static String randomAggregationRule(String aggFunction, String signAndValue, String opCode) { + String rule = "title: Remote Encrypting File System Abuse\n" + + "id: 5f92fff9-82e2-48eb-8fc1-8b133556a551\n" + + "description: Detects remote RPC calls to possibly abuse remote encryption service via MS-EFSR\n" + + "references:\n" + + " - https://attack.mitre.org/tactics/TA0008/\n" + + " - https://msrc.microsoft.com/update-guide/vulnerability/CVE-2021-36942\n" + + " - https://github.com/jsecurity101/MSRPC-to-ATTACK/blob/main/documents/MS-EFSR.md\n" + + " - https://github.com/zeronetworks/rpcfirewall\n" + + " - https://zeronetworks.com/blog/stopping_lateral_movement_via_the_rpc_firewall/\n" + + "tags:\n" + + " - attack.defense_evasion\n" + + "status: experimental\n" + + "author: Sagie Dulce, Dekel Paz\n" + + "date: 2022/01/01\n" + + "modified: 2022/01/01\n" + + "logsource:\n" + + " product: rpc_firewall\n" + + " category: application\n" + + " definition: 'Requirements: install and apply the RPC Firewall to all processes with \"audit:true action:block uuid:df1941c5-fe89-4e79-bf10-463657acf44d or c681d488-d850-11d0-8c52-00c04fd90f7e'\n" + + "detection:\n" + + " sel:\n" + + " Opcode: %s\n" + + " condition: sel | %s(SeverityValue) by Version %s\n" + + "falsepositives:\n" + + " - Legitimate usage of remote file encryption\n" + + "level: high"; + return String.format(Locale.ROOT, rule, opCode, aggFunction, signAndValue); + } + + public static String windowsIndexMapping() { + return "\"properties\": {\n" + + " \"@timestamp\": {\"type\":\"date\"},\n" + + " \"AccessList\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"AccessMask\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"Accesses\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"AccountName\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"AccountType\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"Action\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"Address\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"AllowedToDelegateTo\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"Application\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"ApplicationPath\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"AttributeLDAPDisplayName\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"AttributeValue\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"AuditPolicyChanges\": {\n" + " \"type\": \"text\"\n" + " },\n" + " \"AuditSourceName\": {\n" + @@ -958,407 +1365,214 @@ public static String windowsIndexMapping() { " \"WorkstationName\": {\n" + " \"type\": \"text\"\n" + " },\n" + - " \"_0\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_1\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_10\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_100\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_101\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_102\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_103\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_104\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_105\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_106\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_107\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_108\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_109\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_11\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_110\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_111\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_112\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_113\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_114\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_115\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_116\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_117\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_118\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_119\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_12\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_120\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_121\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_122\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_123\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_124\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_13\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_14\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_15\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_16\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_17\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_18\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_19\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_2\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_20\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_21\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_22\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_23\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_24\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_25\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_26\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_27\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_28\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_29\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_3\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_30\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_31\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_32\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_33\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_34\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_35\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_36\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_37\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_38\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_39\": {\n" + - " \"type\": \"text\"\n" + + " \"EventID\": {\n" + + " \"type\": \"integer\"\n" + " },\n" + - " \"_4\": {\n" + + " \"param1\": {\n" + " \"type\": \"text\"\n" + " },\n" + - " \"_40\": {\n" + + " \"param2\": {\n" + " \"type\": \"text\"\n" + " },\n" + - " \"_41\": {\n" + + " \"processPath\": {\n" + " \"type\": \"text\"\n" + " },\n" + - " \"_42\": {\n" + + " \"sha1\": {\n" + " \"type\": \"text\"\n" + " },\n" + - " \"_43\": {\n" + + " \"src_ip\": {\n" + " \"type\": \"text\"\n" + " },\n" + - " \"_44\": {\n" + + " \"unmapped_HiveName\": {\n" + " \"type\": \"text\"\n" + + " }\n" + + " }"; + } + + public static String windowsIndexMappingOnlyNumericAndDate() { + return "\"properties\": {\n" + + " \"@timestamp\": {\"type\":\"date\"},\n" + + " \"EventTime\": {\n" + + " \"type\": \"date\"\n" + " },\n" + - " \"_45\": {\n" + - " \"type\": \"text\"\n" + + " \"ExecutionProcessID\": {\n" + + " \"type\": \"long\"\n" + " },\n" + - " \"_46\": {\n" + - " \"type\": \"text\"\n" + + " \"ExecutionThreadID\": {\n" + + " \"type\": \"integer\"\n" + " },\n" + - " \"_47\": {\n" + - " \"type\": \"text\"\n" + + " \"EventID\": {\n" + + " \"type\": \"integer\"\n" + " },\n" + - " \"_48\": {\n" + + " \"TaskValue\": {\n" + + " \"type\": \"integer\"\n" + + " }\n" + + " }"; + } + + public static String windowsIndexMappingOnlyNumericAndText() { + return "\"properties\": {\n" + + " \"TaskName\": {\n" + " \"type\": \"text\"\n" + " },\n" + - " \"_49\": {\n" + - " \"type\": \"text\"\n" + + " \"ExecutionProcessID\": {\n" + + " \"type\": \"long\"\n" + " },\n" + - " \"_5\": {\n" + - " \"type\": \"text\"\n" + + " \"ExecutionThreadID\": {\n" + + " \"type\": \"integer\"\n" + " },\n" + - " \"_50\": {\n" + - " \"type\": \"text\"\n" + + " \"EventID\": {\n" + + " \"type\": \"integer\"\n" + " },\n" + - " \"_51\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_52\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_53\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_54\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_55\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_56\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_57\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_58\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_59\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_6\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_60\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_61\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_62\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_63\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_64\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_65\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_66\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_67\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_68\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_69\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_7\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_70\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_71\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_72\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_73\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_74\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_75\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_76\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_77\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_78\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_79\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_8\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_80\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_81\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_82\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_83\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_84\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_85\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_86\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_87\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_88\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_89\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_9\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_90\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_91\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_92\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_93\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_94\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_95\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_96\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_97\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_98\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"_99\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"EventID\": {\n" + + " \"TaskValue\": {\n" + " \"type\": \"integer\"\n" + - " },\n" + - " \"param1\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"param2\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"processPath\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"sha1\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"src_ip\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"unmapped_HiveName\": {\n" + - " \"type\": \"text\"\n" + " }\n" + " }"; } + + public static String randomDoc(int severity, int version, String opCode) { + String doc = "{\n" + + "\"EventTime\":\"2020-02-04T14:59:39.343541+00:00\",\n" + + "\"HostName\":\"EC2AMAZ-EPO7HKA\",\n" + + "\"Keywords\":\"9223372036854775808\",\n" + + "\"SeverityValue\":%s,\n" + + "\"Severity\":\"INFO\",\n" + + "\"EventID\":22,\n" + + "\"SourceName\":\"Microsoft-Windows-Sysmon\",\n" + + "\"ProviderGuid\":\"{5770385F-C22A-43E0-BF4C-06F5698FFBD9}\",\n" + + "\"Version\":%s,\n" + + "\"TaskValue\":22,\n" + + "\"OpcodeValue\":0,\n" + + "\"RecordNumber\":9532,\n" + + "\"ExecutionProcessID\":1996,\n" + + "\"ExecutionThreadID\":2616,\n" + + "\"Channel\":\"Microsoft-Windows-Sysmon/Operational\",\n" + + "\"Domain\":\"NT AUTHORITY\",\n" + + "\"AccountName\":\"SYSTEM\",\n" + + "\"UserID\":\"S-1-5-18\",\n" + + "\"AccountType\":\"User\",\n" + + "\"Message\":\"Dns query:\\r\\nRuleName: \\r\\nUtcTime: 2020-02-04 14:59:38.349\\r\\nProcessGuid: {b3c285a4-3cda-5dc0-0000-001077270b00}\\r\\nProcessId: 1904\\r\\nQueryName: EC2AMAZ-EPO7HKA\\r\\nQueryStatus: 0\\r\\nQueryResults: 172.31.46.38;\\r\\nImage: C:\\\\Program Files\\\\nxlog\\\\nxlog.exe\",\n" + + "\"Category\":\"Dns query (rule: DnsQuery)\",\n" + + "\"Opcode\":\"%s\",\n" + + "\"UtcTime\":\"2020-02-04 14:59:38.349\",\n" + + "\"ProcessGuid\":\"{b3c285a4-3cda-5dc0-0000-001077270b00}\",\n" + + "\"ProcessId\":\"1904\",\"QueryName\":\"EC2AMAZ-EPO7HKA\",\"QueryStatus\":\"0\",\n" + + "\"QueryResults\":\"172.31.46.38;\",\n" + + "\"Image\":\"C:\\\\Program Files\\\\nxlog\\\\regsvr32.exe\",\n" + + "\"EventReceivedTime\":\"2020-02-04T14:59:40.780905+00:00\",\n" + + "\"SourceModuleName\":\"in\",\n" + + "\"SourceModuleType\":\"im_msvistalog\",\n" + + "\"CommandLine\": \"eachtest\",\n" + + "\"Initiated\": \"true\"\n" + + "}"; + return String.format(Locale.ROOT, doc, severity, version, opCode); + + } + + public static String randomDocOnlyNumericAndDate(int severity, int version, String opCode) { + String doc = "{\n" + + "\"EventTime\":\"2020-02-04T14:59:39.343541+00:00\",\n" + + "\"ExecutionProcessID\":2001,\n" + + "\"ExecutionThreadID\":2616,\n" + + "\"EventID\": 1234,\n" + + "\"TaskValue\":22\n" + + "}"; + return String.format(Locale.ROOT, doc, severity, version, opCode); + } + + public static String randomDocOnlyNumericAndText(int severity, int version, String opCode) { + String doc = "{\n" + + "\"TaskName\":\"SYSTEM\",\n" + + "\"ExecutionProcessID\":2001,\n" + + "\"ExecutionThreadID\":2616,\n" + + "\"EventID\": 1234,\n" + + "\"TaskValue\":22\n" + + "}"; + return String.format(Locale.ROOT, doc, severity, version, opCode); + } + + //Add IPs in HostName field. + public static String randomDocWithIpIoc(int severity, int version, String ioc) { + String doc = "{\n" + + "\"EventTime\":\"2020-02-04T14:59:39.343541+00:00\",\n" + + "\"HostName\":\"%s\",\n" + + "\"Keywords\":\"9223372036854775808\",\n" + + "\"SeverityValue\":%s,\n" + + "\"Severity\":\"INFO\",\n" + + "\"EventID\":22,\n" + + "\"SourceName\":\"Microsoft-Windows-Sysmon\",\n" + + "\"ProviderGuid\":\"{5770385F-C22A-43E0-BF4C-06F5698FFBD9}\",\n" + + "\"Version\":%s,\n" + + "\"TaskValue\":22,\n" + + "\"OpcodeValue\":0,\n" + + "\"RecordNumber\":9532,\n" + + "\"ExecutionProcessID\":1996,\n" + + "\"ExecutionThreadID\":2616,\n" + + "\"Channel\":\"Microsoft-Windows-Sysmon/Operational\",\n" + + "\"Domain\":\"NT AUTHORITY\",\n" + + "\"AccountName\":\"SYSTEM\",\n" + + "\"UserID\":\"S-1-5-18\",\n" + + "\"AccountType\":\"User\",\n" + + "\"Message\":\"Dns query:\\r\\nRuleName: \\r\\nUtcTime: 2020-02-04 14:59:38.349\\r\\nProcessGuid: {b3c285a4-3cda-5dc0-0000-001077270b00}\\r\\nProcessId: 1904\\r\\nQueryName: EC2AMAZ-EPO7HKA\\r\\nQueryStatus: 0\\r\\nQueryResults: 172.31.46.38;\\r\\nImage: C:\\\\Program Files\\\\nxlog\\\\nxlog.exe\",\n" + + "\"Category\":\"Dns query (rule: DnsQuery)\",\n" + + "\"Opcode\":\"blahblah\",\n" + + "\"UtcTime\":\"2020-02-04 14:59:38.349\",\n" + + "\"ProcessGuid\":\"{b3c285a4-3cda-5dc0-0000-001077270b00}\",\n" + + "\"ProcessId\":\"1904\",\"QueryName\":\"EC2AMAZ-EPO7HKA\",\"QueryStatus\":\"0\",\n" + + "\"QueryResults\":\"172.31.46.38;\",\n" + + "\"Image\":\"C:\\\\Program Files\\\\nxlog\\\\regsvr32.exe\",\n" + + "\"EventReceivedTime\":\"2020-02-04T14:59:40.780905+00:00\",\n" + + "\"SourceModuleName\":\"in\",\n" + + "\"SourceModuleType\":\"im_msvistalog\",\n" + + "\"CommandLine\": \"eachtest\",\n" + + "\"Initiated\": \"true\"\n" + + "}"; + return String.format(Locale.ROOT, doc, ioc, severity, version); + + } + + public static String randomDocWithNullField() { + return "{\n" + + "\"@timestamp\":\"2020-02-04T14:59:39.343541+00:00\",\n" + + "\"EventTime\":\"2020-02-04T14:59:39.343541+00:00\",\n" + + "\"HostName\":\"EC2AMAZ-EPO7HKA\",\n" + + "\"Keywords\":\"9223372036854775808\",\n" + + "\"SeverityValue\":2,\n" + + "\"Severity\":\"INFO\",\n" + + "\"EventID\":22,\n" + + "\"SourceName\":\"Microsoft-Windows-Sysmon\",\n" + + "\"ProviderGuid\":\"{5770385F-C22A-43E0-BF4C-06F5698FFBD9}\",\n" + + "\"Version\":5,\n" + + "\"TaskValue\":22,\n" + + "\"OpcodeValue\":0,\n" + + "\"RecordNumber\":null,\n" + + "\"ExecutionProcessID\":1996,\n" + + "\"ExecutionThreadID\":2616,\n" + + "\"Channel\":\"Microsoft-Windows-Sysmon/Operational\",\n" + + "\"Domain\":\"NTAUTHORITY\",\n" + + "\"AccountName\":\"SYSTEM\",\n" + + "\"UserID\":\"S-1-5-18\",\n" + + "\"AccountType\":\"User\",\n" + + "\"Message\":\"Dns query:\\r\\nRuleName: \\r\\nUtcTime: 2020-02-04 14:59:38.349\\r\\nProcessGuid: {b3c285a4-3cda-5dc0-0000-001077270b00}\\r\\nProcessId: 1904\\r\\nQueryName: EC2AMAZ-EPO7HKA\\r\\nQueryStatus: 0\\r\\nQueryResults: 172.31.46.38;\\r\\nImage: C:\\\\Program Files\\\\nxlog\\\\nxlog.exe\",\n" + + "\"Category\":\"Dns query (rule: DnsQuery)\",\n" + + "\"Opcode\":\"Info\",\n" + + "\"UtcTime\":\"2020-02-04 14:59:38.349\",\n" + + "\"ProcessGuid\":\"{b3c285a4-3cda-5dc0-0000-001077270b00}\",\n" + + "\"ProcessId\":\"1904\",\"QueryName\":\"EC2AMAZ-EPO7HKA\",\"QueryStatus\":\"0\",\n" + + "\"QueryResults\":\"172.31.46.38;\",\n" + + "\"Image\":\"C:\\\\Program Files\\\\nxlog\\\\regsvr32.exe\",\n" + + "\"EventReceivedTime\":\"2020-02-04T14:59:40.780905+00:00\",\n" + + "\"SourceModuleName\":\"in\",\n" + + "\"SourceModuleType\":\"im_msvistalog\",\n" + + "\"CommandLine\": \"eachtest\",\n" + + "\"Initiated\": \"true\"\n" + + "}"; + } + public static String randomDoc() { return "{\n" + + "\"@timestamp\":\"2020-02-04T14:59:39.343541+00:00\",\n" + "\"EventTime\":\"2020-02-04T14:59:39.343541+00:00\",\n" + "\"HostName\":\"EC2AMAZ-EPO7HKA\",\n" + "\"Keywords\":\"9223372036854775808\",\n" + @@ -1374,7 +1588,7 @@ public static String randomDoc() { "\"ExecutionProcessID\":1996,\n" + "\"ExecutionThreadID\":2616,\n" + "\"Channel\":\"Microsoft-Windows-Sysmon/Operational\",\n" + - "\"Domain\":\"NT AUTHORITY\",\n" + + "\"Domain\":\"NTAUTHORITY\",\n" + "\"AccountName\":\"SYSTEM\",\n" + "\"UserID\":\"S-1-5-18\",\n" + "\"AccountType\":\"User\",\n" + @@ -1394,6 +1608,118 @@ public static String randomDoc() { "}"; } + public static String randomVpcFlowDoc() { + return "{\n" + + " \"version\": 1,\n" + + " \"account-id\": \"A12345\",\n" + + " \"interface-id\": \"I12345\",\n" + + " \"srcaddr\": \"1.2.3.4\",\n" + + " \"dstaddr\": \"4.5.6.7\",\n" + + " \"srcport\": 9000,\n" + + " \"dstport\": 8000,\n" + + " \"severity_id\": \"-1\",\n" + + " \"class_name\": \"Network Activity\"\n" + + "}"; + } + + public static String randomAdLdapDoc() { + return "{\n" + + " \"azure.platformlogs.result_type\": 50126,\n" + + " \"azure.signinlogs.result_description\": \"Invalid username or password or Invalid on-premises username or password.\",\n" + + " \"azure.signinlogs.props.user_id\": \"DEYSUBHO\"\n" + + "}"; + } + + public static String randomAppLogDoc() { + return "{\n" + + " \"endpoint\": \"/customer_records.txt\",\n" + + " \"http_method\": \"POST\",\n" + + " \"keywords\": \"INVALID\"\n" + + "}"; + } + + public static String randomS3AccessLogDoc() { + return "{\n" + + " \"aws.cloudtrail.eventSource\": \"s3.amazonaws.com\",\n" + + " \"aws.cloudtrail.eventName\": \"ReplicateObject\",\n" + + " \"aws.cloudtrail.eventTime\": 1\n" + + "}"; + } + + public static String adLdapLogMappings() { + return "\"properties\": {\n" + + " \"ResultType\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"ResultDescription\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"azure.signinlogs.props.user_id\": {\n" + + " \"type\": \"text\"\n" + + " }\n" + + " }"; + } + + public static String s3AccessLogMappings() { + return " \"properties\": {" + + " \"aws.cloudtrail.eventSource\": {" + + " \"type\": \"text\"" + + " }," + + " \"aws.cloudtrail.eventName\": {" + + " \"type\": \"text\"" + + " }," + + " \"aws.cloudtrail.eventTime\": {" + + " \"type\": \"integer\"" + + " }" + + " }"; + } + + public static String appLogMappings() { + return " \"properties\": {" + + " \"http_method\": {" + + " \"type\": \"text\"" + + " }," + + " \"endpoint\": {" + + " \"type\": \"text\"" + + " }," + + " \"keywords\": {" + + " \"type\": \"text\"" + + " }" + + " }"; + } + + public static String vpcFlowMappings() { + return " \"properties\": {" + + " \"version\": {" + + " \"type\": \"integer\"" + + " }," + + " \"account-id\": {" + + " \"type\": \"text\"" + + " }," + + " \"interface-id\": {" + + " \"type\": \"text\"" + + " }," + + " \"srcaddr\": {" + + " \"type\": \"text\"" + + " }," + + " \"dstaddr\": {" + + " \"type\": \"text\"" + + " }," + + " \"srcport\": {" + + " \"type\": \"integer\"" + + " }," + + " \"dstport\": {" + + " \"type\": \"integer\"" + + " }," + + " \"severity_id\": {" + + " \"type\": \"text\"" + + " }," + + " \"class_name\": {" + + " \"type\": \"text\"" + + " }" + + " }"; + } + public static XContentParser parser(String xc) throws IOException { XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, xc); parser.nextToken(); diff --git a/src/test/java/org/opensearch/securityanalytics/action/CreateIndexMappingsRequestTests.java b/src/test/java/org/opensearch/securityanalytics/action/CreateIndexMappingsRequestTests.java index ac687a733..a4beab5a4 100644 --- a/src/test/java/org/opensearch/securityanalytics/action/CreateIndexMappingsRequestTests.java +++ b/src/test/java/org/opensearch/securityanalytics/action/CreateIndexMappingsRequestTests.java @@ -8,8 +8,8 @@ import org.opensearch.action.ActionRequestValidationException; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.securityanalytics.action.CreateIndexMappingsRequest; import org.opensearch.test.OpenSearchTestCase; diff --git a/src/test/java/org/opensearch/securityanalytics/action/GetIndexMappingsRequestTests.java b/src/test/java/org/opensearch/securityanalytics/action/GetIndexMappingsRequestTests.java index b6636e2b6..dc9753a06 100644 --- a/src/test/java/org/opensearch/securityanalytics/action/GetIndexMappingsRequestTests.java +++ b/src/test/java/org/opensearch/securityanalytics/action/GetIndexMappingsRequestTests.java @@ -8,8 +8,8 @@ import org.opensearch.action.ActionRequestValidationException; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.securityanalytics.action.GetIndexMappingsRequest; import org.opensearch.test.OpenSearchTestCase; diff --git a/src/test/java/org/opensearch/securityanalytics/action/IndexDetectorRequestTests.java b/src/test/java/org/opensearch/securityanalytics/action/IndexDetectorRequestTests.java index 8bc2dae0f..9144f56ee 100644 --- a/src/test/java/org/opensearch/securityanalytics/action/IndexDetectorRequestTests.java +++ b/src/test/java/org/opensearch/securityanalytics/action/IndexDetectorRequestTests.java @@ -4,11 +4,16 @@ */ package org.opensearch.securityanalytics.action; +import java.util.Collections; +import java.util.stream.Collectors; import org.junit.Assert; import org.opensearch.action.support.WriteRequest; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.rest.RestRequest; +import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.model.DetectorInput; +import org.opensearch.securityanalytics.model.DetectorRule; import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; @@ -16,6 +21,7 @@ import java.util.UUID; import static org.opensearch.securityanalytics.TestHelpers.randomDetector; +import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputs; public class IndexDetectorRequestTests extends OpenSearchTestCase { @@ -34,4 +40,28 @@ public void testIndexDetectorPostRequest() throws IOException { Assert.assertEquals(RestRequest.Method.POST, newRequest.getMethod()); Assert.assertNotNull(newRequest.getDetector()); } + + public void testIndexDetectorPostRequest_2() throws IOException { + String detectorId = UUID.randomUUID().toString(); + + List rules = List.of(UUID.randomUUID().toString()); + DetectorInput input1 = new DetectorInput("windows detector for security analytics", List.of("windows-1"), Collections.emptyList(), + rules.stream().map(DetectorRule::new).collect(Collectors.toList())); + DetectorInput input2 = new DetectorInput("windows detector for security analytics", List.of("windows-2"), Collections.emptyList(), + rules.stream().map(DetectorRule::new).collect(Collectors.toList())); + + Detector detector = randomDetectorWithInputs(List.of(input1, input2)); + IndexDetectorRequest request = new IndexDetectorRequest(detectorId, WriteRequest.RefreshPolicy.IMMEDIATE, RestRequest.Method.POST, detector); + + Assert.assertNotNull(request); + + BytesStreamOutput out = new BytesStreamOutput(); + request.writeTo(out); + + StreamInput sin = StreamInput.wrap(out.bytes().toBytesRef().bytes); + IndexDetectorRequest newRequest = new IndexDetectorRequest(sin); + Assert.assertEquals(detectorId, request.getDetectorId()); + Assert.assertEquals(RestRequest.Method.POST, newRequest.getMethod()); + Assert.assertNotNull(newRequest.getDetector()); + } } \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/action/IndexDetectorResponseTests.java b/src/test/java/org/opensearch/securityanalytics/action/IndexDetectorResponseTests.java index 84f930d1b..ad6a110e2 100644 --- a/src/test/java/org/opensearch/securityanalytics/action/IndexDetectorResponseTests.java +++ b/src/test/java/org/opensearch/securityanalytics/action/IndexDetectorResponseTests.java @@ -49,7 +49,8 @@ public void testIndexDetectorPostResponse() throws IOException { DetectorMonitorConfig.getAlertsIndex(Detector.DetectorType.OTHERS_APPLICATION.getDetectorType()), null, null, - DetectorMonitorConfig.getFindingsIndex(Detector.DetectorType.OTHERS_APPLICATION.getDetectorType()) + DetectorMonitorConfig.getFindingsIndex(Detector.DetectorType.OTHERS_APPLICATION.getDetectorType()), + Collections.emptyMap() ); IndexDetectorResponse response = new IndexDetectorResponse("1234", 1L, RestStatus.OK, detector); Assert.assertNotNull(response); diff --git a/src/test/java/org/opensearch/securityanalytics/action/UpdateIndexMappingsRequestTests.java b/src/test/java/org/opensearch/securityanalytics/action/UpdateIndexMappingsRequestTests.java index 5b5028b71..80d6ad048 100644 --- a/src/test/java/org/opensearch/securityanalytics/action/UpdateIndexMappingsRequestTests.java +++ b/src/test/java/org/opensearch/securityanalytics/action/UpdateIndexMappingsRequestTests.java @@ -8,8 +8,8 @@ import org.opensearch.action.ActionRequestValidationException; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.securityanalytics.action.UpdateIndexMappingsRequest; import org.opensearch.test.OpenSearchTestCase; diff --git a/src/test/java/org/opensearch/securityanalytics/action/ValidateRulesRequestTests.java b/src/test/java/org/opensearch/securityanalytics/action/ValidateRulesRequestTests.java new file mode 100644 index 000000000..02cc9d35c --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/action/ValidateRulesRequestTests.java @@ -0,0 +1,58 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.action; + +import java.io.IOException; +import java.util.List; +import java.util.UUID; +import org.junit.Assert; +import org.opensearch.action.support.WriteRequest; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.rest.RestRequest; +import org.opensearch.test.OpenSearchTestCase; + + +import static org.opensearch.securityanalytics.TestHelpers.randomDetector; + +public class ValidateRulesRequestTests extends OpenSearchTestCase { + + public void testValidateRulesRequest_parseXContent() throws IOException { + + String source = "{" + + "\"index_name\": \"my_index_111\"," + + "\"rules\": [ \"rule_id_1\", \"rule_id_2\" ]" + + "}"; + ValidateRulesRequest req; + try (XContentParser xcp = createParser(JsonXContent.jsonXContent, source)) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp); + req = ValidateRulesRequest.parse(xcp); + } + assertEquals("my_index_111", req.getIndexName()); + assertEquals(2, req.getRules().size()); + assertEquals("rule_id_1", req.getRules().get(0)); + assertEquals("rule_id_2", req.getRules().get(1)); + } + + public void testValidateRulesRequest_streams() throws IOException { + String indeName = "my_index_1"; + ValidateRulesRequest request = new ValidateRulesRequest(indeName, List.of("rule_id_1", "rule_id_2")); + Assert.assertNotNull(request); + + BytesStreamOutput out = new BytesStreamOutput(); + request.writeTo(out); + + StreamInput sin = StreamInput.wrap(out.bytes().toBytesRef().bytes); + ValidateRulesRequest newRequest = new ValidateRulesRequest(sin); + assertEquals(indeName, newRequest.getIndexName()); + assertEquals(2, newRequest.getRules().size()); + assertEquals("rule_id_1", newRequest.getRules().get(0)); + assertEquals("rule_id_2", newRequest.getRules().get(1)); + } + +} diff --git a/src/test/java/org/opensearch/securityanalytics/action/ValidateRulesResponseTests.java b/src/test/java/org/opensearch/securityanalytics/action/ValidateRulesResponseTests.java new file mode 100644 index 000000000..0066c1b70 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/action/ValidateRulesResponseTests.java @@ -0,0 +1,66 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.action; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import org.junit.Assert; +import org.opensearch.common.bytes.BytesReference; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.test.OpenSearchTestCase; + + +import static org.opensearch.securityanalytics.action.ValidateRulesRequest.RULES_FIELD; +import static org.opensearch.securityanalytics.action.ValidateRulesResponse.NONAPPLICABLE_FIELDS; + +public class ValidateRulesResponseTests extends OpenSearchTestCase { + + public void testValidateRulesResponse_parseXContent() throws IOException { + + ValidateRulesResponse response = new ValidateRulesResponse(List.of("rule_id_1")); + BytesReference bytes = BytesReference.bytes(response.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)); + try (XContentParser xcp = createParser(JsonXContent.jsonXContent, bytes)) { + if (xcp.currentToken() == null) { + xcp.nextToken(); + } + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp); + List ruleIds = null; + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + String fieldName = xcp.currentName(); + xcp.nextToken(); + assertEquals(NONAPPLICABLE_FIELDS, fieldName); + ruleIds = new ArrayList<>(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp); + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + ruleIds.add(xcp.text()); + } + } + assertEquals(1, ruleIds.size()); + assertEquals("rule_id_1", ruleIds.get(0)); + } + } + + public void testValidateRulesResponse_streams() throws IOException { + ValidateRulesResponse response = new ValidateRulesResponse(List.of("rule_id_1", "rule_id_2")); + Assert.assertNotNull(response); + + BytesStreamOutput out = new BytesStreamOutput(); + response.writeTo(out); + + StreamInput sin = StreamInput.wrap(out.bytes().toBytesRef().bytes); + ValidateRulesResponse newResponse = new ValidateRulesResponse(sin); + assertEquals(2, newResponse.getNonapplicableFields().size()); + assertEquals("rule_id_1", newResponse.getNonapplicableFields().get(0)); + assertEquals("rule_id_2", newResponse.getNonapplicableFields().get(1)); + } + +} diff --git a/src/test/java/org/opensearch/securityanalytics/alerts/AlertingServiceTests.java b/src/test/java/org/opensearch/securityanalytics/alerts/AlertingServiceTests.java index 4a061525a..b6df74548 100644 --- a/src/test/java/org/opensearch/securityanalytics/alerts/AlertingServiceTests.java +++ b/src/test/java/org/opensearch/securityanalytics/alerts/AlertingServiceTests.java @@ -7,6 +7,7 @@ import java.time.Instant; import java.time.ZoneId; +import java.util.Collections; import java.util.List; import java.util.Map; import org.opensearch.action.ActionListener; @@ -26,6 +27,7 @@ import org.opensearch.securityanalytics.action.GetDetectorResponse; import org.opensearch.securityanalytics.config.monitors.DetectorMonitorConfig; import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.transport.TransportIndexDetectorAction; import org.opensearch.test.OpenSearchTestCase; @@ -61,7 +63,8 @@ public void testGetAlerts_success() { DetectorMonitorConfig.getAlertsIndex(Detector.DetectorType.OTHERS_APPLICATION.getDetectorType()), null, null, - DetectorMonitorConfig.getFindingsIndex(Detector.DetectorType.OTHERS_APPLICATION.getDetectorType()) + DetectorMonitorConfig.getFindingsIndex(Detector.DetectorType.OTHERS_APPLICATION.getDetectorType()), + Collections.emptyMap() ); GetDetectorResponse getDetectorResponse = new GetDetectorResponse("detector_id123", 1L, RestStatus.OK, detector); @@ -89,7 +92,8 @@ public void testGetAlerts_success() { List.of(), List.of(), Map.of(), - new DataSources() + new DataSources(), + TransportIndexDetectorAction.PLUGIN_OWNER_FIELD ), new DocumentLevelTrigger("trigger_id_1", "my_trigger", "severity_low", List.of(), new Script("")), List.of("finding_id_1"), @@ -119,7 +123,8 @@ public void testGetAlerts_success() { List.of(), List.of(), Map.of(), - new DataSources() + new DataSources(), + TransportIndexDetectorAction.PLUGIN_OWNER_FIELD ), new DocumentLevelTrigger("trigger_id_1", "my_trigger", "severity_low", List.of(), new Script("")), List.of("finding_id_1"), @@ -230,7 +235,8 @@ public void testGetFindings_getFindingsByMonitorIdFailures() { DetectorMonitorConfig.getAlertsIndex(Detector.DetectorType.OTHERS_APPLICATION.getDetectorType()), null, null, - DetectorMonitorConfig.getFindingsIndex(Detector.DetectorType.OTHERS_APPLICATION.getDetectorType()) + DetectorMonitorConfig.getFindingsIndex(Detector.DetectorType.OTHERS_APPLICATION.getDetectorType()), + Collections.emptyMap() ); GetDetectorResponse getDetectorResponse = new GetDetectorResponse("detector_id123", 1L, RestStatus.OK, detector); diff --git a/src/test/java/org/opensearch/securityanalytics/alerts/AlertsIT.java b/src/test/java/org/opensearch/securityanalytics/alerts/AlertsIT.java index 035e39434..c50abfa82 100644 --- a/src/test/java/org/opensearch/securityanalytics/alerts/AlertsIT.java +++ b/src/test/java/org/opensearch/securityanalytics/alerts/AlertsIT.java @@ -13,13 +13,13 @@ import java.util.Locale; import java.util.Map; import java.util.stream.Collectors; - import org.apache.http.HttpStatus; import org.apache.http.entity.StringEntity; import org.apache.http.message.BasicHeader; import org.junit.Assert; import org.opensearch.client.Request; import org.opensearch.client.Response; +import org.opensearch.client.ResponseException; import org.opensearch.commons.alerting.model.action.Action; import org.opensearch.rest.RestStatus; import org.opensearch.search.SearchHit; @@ -34,12 +34,17 @@ import static org.opensearch.securityanalytics.TestHelpers.netFlowMappings; import static org.opensearch.securityanalytics.TestHelpers.randomAction; +import static org.opensearch.securityanalytics.TestHelpers.randomDetectorType; import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputsAndTriggers; import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithTriggers; import static org.opensearch.securityanalytics.TestHelpers.randomDoc; import static org.opensearch.securityanalytics.TestHelpers.randomIndex; import static org.opensearch.securityanalytics.TestHelpers.randomRule; import static org.opensearch.securityanalytics.TestHelpers.windowsIndexMapping; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ALERT_HISTORY_INDEX_MAX_AGE; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ALERT_HISTORY_MAX_DOCS; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ALERT_HISTORY_RETENTION_PERIOD; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.ALERT_HISTORY_ROLLOVER_PERIOD; public class AlertsIT extends SecurityAnalyticsRestTestCase { @@ -49,7 +54,7 @@ public void testGetAlerts_success() throws IOException { String rule = randomRule(); - Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", "windows"), + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", randomDetectorType()), new StringEntity(rule), new BasicHeader("Content-Type", "application/json")); Assert.assertEquals("Create rule failed", RestStatus.CREATED, restStatus(createResponse)); @@ -62,7 +67,7 @@ public void testGetAlerts_success() throws IOException { // both req params and req body are supported createMappingRequest.setJsonEntity( "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"windows\", " + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + " \"partial\":true" + "}" ); @@ -132,7 +137,7 @@ public void testGetAlerts_success() throws IOException { hits = new ArrayList<>(); while (hits.size() == 0) { - hits = executeSearch(DetectorMonitorConfig.getAlertsIndex("windows"), request); + hits = executeSearch(DetectorMonitorConfig.getAlertsIndex(randomDetectorType()), request); } // Call GetAlerts API @@ -160,9 +165,19 @@ public void testGetAlerts_success() throws IOException { assertEquals(((ArrayList) ackAlertsResponseMap.get("acknowledged")).size(), 1); } + public void testGetAlerts_noDetector_failure() throws IOException { + // Call GetAlerts API + Map params = new HashMap<>(); + params.put("detector_id", "nonexistent_detector_id"); + try { + makeRequest(client(), "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + } catch (ResponseException e) { + assertEquals(HttpStatus.SC_NOT_FOUND, e.getResponse().getStatusLine().getStatusCode()); + } + } @SuppressWarnings("unchecked") - public void testAckAlerts_WithInvalidDetectorAlertsCombination() throws IOException, InterruptedException { + public void testAckAlerts_WithInvalidDetectorAlertsCombination() throws IOException { String index = createTestIndex(randomIndex(), windowsIndexMapping()); // Execute CreateMappingsAction to add alias mapping for index @@ -170,7 +185,7 @@ public void testAckAlerts_WithInvalidDetectorAlertsCombination() throws IOExcept // both req params and req body are supported createMappingRequest.setJsonEntity( "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"windows\", " + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + " \"partial\":true" + "}" ); @@ -231,7 +246,7 @@ public void testAckAlerts_WithInvalidDetectorAlertsCombination() throws IOExcept hits = new ArrayList<>(); while (hits.size() == 0) { - hits = executeSearch(DetectorMonitorConfig.getAlertsIndex("windows"), request); + hits = executeSearch(DetectorMonitorConfig.getAlertsIndex(randomDetectorType()), request); } // Call GetAlerts API @@ -259,6 +274,129 @@ public void testAckAlerts_WithInvalidDetectorAlertsCombination() throws IOExcept } } + public void testAckAlertsWithInvalidDetector() throws IOException { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + String rule = randomRule(); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", randomDetectorType()), + new StringEntity(rule), new BasicHeader("Content-Type", "application/json")); + Assert.assertEquals("Create rule failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String createdId = responseBody.get("_id").toString(); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + createAlertingMonitorConfigIndex(null); + Action triggerAction = randomAction(createDestination()); + + Detector detector = randomDetectorWithInputsAndTriggers(List.of(new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(createdId)), + getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList()))), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(createdId), List.of(), List.of("attack.defense_evasion"), List.of(triggerAction)))); + + createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + responseBody = asMap(createResponse); + + createdId = responseBody.get("_id").toString(); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + createdId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + String monitorId = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + + indexDoc(index, "1", randomDoc()); + + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(6, noOfSigmaRuleMatches); + + Assert.assertEquals(1, ((Map) executeResults.get("trigger_results")).values().size()); + + for (Map.Entry> triggerResult: ((Map>) executeResults.get("trigger_results")).entrySet()) { + Assert.assertEquals(1, ((Map) triggerResult.getValue().get("action_results")).values().size()); + + for (Map.Entry> alertActionResult: ((Map>) triggerResult.getValue().get("action_results")).entrySet()) { + Map actionResults = alertActionResult.getValue(); + + for (Map.Entry actionResult: actionResults.entrySet()) { + Map actionOutput = ((Map>) actionResult.getValue()).get("output"); + String expectedMessage = triggerAction.getSubjectTemplate().getIdOrCode().replace("{{ctx.detector.name}}", detector.getName()) + .replace("{{ctx.trigger.name}}", "test-trigger").replace("{{ctx.trigger.severity}}", "1"); + + Assert.assertEquals(expectedMessage, actionOutput.get("subject")); + Assert.assertEquals(expectedMessage, actionOutput.get("message")); + } + } + } + + request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + hits = new ArrayList<>(); + + while (hits.size() == 0) { + hits = executeSearch(DetectorMonitorConfig.getAlertsIndex(randomDetectorType()), request); + } + + // Call GetAlerts API + Map params = new HashMap<>(); + params.put("detector_id", createdId); + Response getAlertsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + Map getAlertsBody = asMap(getAlertsResponse); + // TODO enable asserts here when able + Assert.assertEquals(1, getAlertsBody.get("total_alerts")); + String alertId = (String) ((ArrayList>) getAlertsBody.get("alerts")).get(0).get("id"); + String detectorId = (String) ((ArrayList>) getAlertsBody.get("alerts")).get(0).get("detector_id"); + String body = String.format(Locale.getDefault(), "{\"alerts\":[\"%s\"]}", alertId); + Request post = new Request("POST", String.format( + Locale.getDefault(), + "%s/%s/_acknowledge/alerts", + SecurityAnalyticsPlugin.DETECTOR_BASE_URI, + java.util.UUID.randomUUID())); + post.setJsonEntity(body); + + try { + client().performRequest(post); + } catch (ResponseException ex) { + Assert.assertEquals(HttpStatus.SC_NOT_FOUND, ex.getResponse().getStatusLine().getStatusCode()); + } + + body = String.format(Locale.getDefault(), "{\"alerts\":[\"%s\"]}", java.util.UUID.randomUUID()); + post = new Request("POST", String.format( + Locale.getDefault(), + "%s/%s/_acknowledge/alerts", + SecurityAnalyticsPlugin.DETECTOR_BASE_URI, + detectorId)); + post.setJsonEntity(body); + } + public void testGetAlerts_byDetectorType_success() throws IOException, InterruptedException { String index = createTestIndex(randomIndex(), windowsIndexMapping()); @@ -267,7 +405,7 @@ public void testGetAlerts_byDetectorType_success() throws IOException, Interrupt // both req params and req body are supported createMappingRequest.setJsonEntity( "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"windows\", " + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + " \"partial\":true" + "}" ); @@ -275,7 +413,7 @@ public void testGetAlerts_byDetectorType_success() throws IOException, Interrupt Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("windows"), List.of(), List.of(), List.of(), List.of()))); + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -314,12 +452,12 @@ public void testGetAlerts_byDetectorType_success() throws IOException, Interrupt hits = new ArrayList<>(); while (hits.size() == 0) { - hits = executeSearch(DetectorMonitorConfig.getAlertsIndex("windows"), request); + hits = executeSearch(DetectorMonitorConfig.getAlertsIndex(randomDetectorType()), request); } // Call GetAlerts API Map params = new HashMap<>(); - params.put("detectorType", Detector.DetectorType.WINDOWS.getDetectorType()); + params.put("detectorType", randomDetectorType()); Response getAlertsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); Map getAlertsBody = asMap(getAlertsResponse); // TODO enable asserts here when able @@ -334,7 +472,7 @@ public void testGetAlerts_byDetectorType_multipleDetectors_success() throws IOEx // both req params and req body are supported createMappingRequest.setJsonEntity( "{ \"index_name\":\"" + index1 + "\"," + - " \"rule_topic\":\"windows\", " + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + " \"partial\":true" + "}" ); @@ -354,7 +492,7 @@ public void testGetAlerts_byDetectorType_multipleDetectors_success() throws IOEx Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); // Detector 1 - WINDOWS - Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("windows"), List.of(), List.of(), List.of(), List.of()))); + Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector1)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -425,7 +563,7 @@ public void testGetAlerts_byDetectorType_multipleDetectors_success() throws IOEx hits = new ArrayList<>(); while (hits.size() == 0) { - hits = executeSearch(DetectorMonitorConfig.getAlertsIndex("windows"), request); + hits = executeSearch(DetectorMonitorConfig.getAlertsIndex(randomDetectorType()), request); } hits = new ArrayList<>(); while (hits.size() == 0) { @@ -436,7 +574,7 @@ public void testGetAlerts_byDetectorType_multipleDetectors_success() throws IOEx // Call GetAlerts API for WINDOWS detector Map params = new HashMap<>(); - params.put("detectorType", Detector.DetectorType.WINDOWS.getDetectorType()); + params.put("detectorType", randomDetectorType()); Response getAlertsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); Map getAlertsBody = asMap(getAlertsResponse); Assert.assertEquals(1, getAlertsBody.get("total_alerts")); @@ -448,4 +586,334 @@ public void testGetAlerts_byDetectorType_multipleDetectors_success() throws IOEx Assert.assertEquals(1, getAlertsBody.get("total_alerts")); } + + public void testAlertHistoryRollover_maxAge() throws IOException, InterruptedException { + updateClusterSetting(ALERT_HISTORY_ROLLOVER_PERIOD.getKey(), "1s"); + updateClusterSetting(ALERT_HISTORY_MAX_DOCS.getKey(), "1000"); + updateClusterSetting(ALERT_HISTORY_INDEX_MAX_AGE.getKey(), "1s"); + + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String createdId = responseBody.get("_id").toString(); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + createdId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + String monitorId = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + + indexDoc(index, "1", randomDoc()); + + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); + + request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + hits = new ArrayList<>(); + + while (hits.size() == 0) { + hits = executeSearch(DetectorMonitorConfig.getAlertsIndex(randomDetectorType()), request); + } + + List alertIndices = getAlertIndices(detector.getDetectorType()); + while(alertIndices.size() < 3) { + alertIndices = getAlertIndices(detector.getDetectorType()); + Thread.sleep(1000); + } + assertTrue("Did not find 3 alert indices", alertIndices.size() >= 3); + + restoreAlertsFindingsIMSettings(); + } + + public void testAlertHistoryRollover_maxAge_low_retention() throws IOException, InterruptedException { + updateClusterSetting(ALERT_HISTORY_ROLLOVER_PERIOD.getKey(), "1s"); + updateClusterSetting(ALERT_HISTORY_MAX_DOCS.getKey(), "1000"); + updateClusterSetting(ALERT_HISTORY_INDEX_MAX_AGE.getKey(), "1s"); + + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String createdId = responseBody.get("_id").toString(); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + createdId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + String monitorId = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + + indexDoc(index, "1", randomDoc()); + + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); + + request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + hits = new ArrayList<>(); + + while (hits.size() == 0) { + hits = executeSearch(DetectorMonitorConfig.getAlertsIndex(randomDetectorType()), request); + } + + List alertIndices = getAlertIndices(detector.getDetectorType()); + while(alertIndices.size() < 3) { + alertIndices = getAlertIndices(detector.getDetectorType()); + Thread.sleep(1000); + } + assertTrue("Did not find 3 alert indices", alertIndices.size() >= 3); + + updateClusterSetting(ALERT_HISTORY_INDEX_MAX_AGE.getKey(), "1000s"); + updateClusterSetting(ALERT_HISTORY_RETENTION_PERIOD.getKey(), "1s"); + + while(alertIndices.size() != 1) { + alertIndices = getAlertIndices(detector.getDetectorType()); + Thread.sleep(1000); + } + + assertTrue("Did not find 3 alert indices", alertIndices.size() == 1); + + restoreAlertsFindingsIMSettings(); + } + + public void testAlertHistoryRollover_maxDocs() throws IOException, InterruptedException { + updateClusterSetting(ALERT_HISTORY_ROLLOVER_PERIOD.getKey(), "1s"); + updateClusterSetting(ALERT_HISTORY_MAX_DOCS.getKey(), "1"); + + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String detectorId = responseBody.get("_id").toString(); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + String monitorId = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + + indexDoc(index, "1", randomDoc()); + + client().performRequest(new Request("POST", "_refresh")); + + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); + + request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + hits = new ArrayList<>(); + + while (hits.size() == 0) { + hits = executeSearch(DetectorMonitorConfig.getAlertsIndex(randomDetectorType()), request); + } + + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getAlertsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + Map getAlertsBody = asMap(getAlertsResponse); + // TODO enable asserts here when able + Assert.assertEquals(1, getAlertsBody.get("total_alerts")); + String alertId = (String) ((ArrayList>) getAlertsBody.get("alerts")).get(0).get("id"); + String _detectorId = (String) ((ArrayList>) getAlertsBody.get("alerts")).get(0).get("detector_id"); + + // Ack alert to move it to history index + acknowledgeAlert(alertId, detectorId); + + List alertIndices = getAlertIndices(detector.getDetectorType()); + while(alertIndices.size() < 3) { + alertIndices = getAlertIndices(detector.getDetectorType()); + Thread.sleep(1000); + } + assertTrue("Did not find 3 alert indices", alertIndices.size() >= 3); + + restoreAlertsFindingsIMSettings(); + } + + public void testGetAlertsFromAllIndices() throws IOException, InterruptedException { + updateClusterSetting(ALERT_HISTORY_ROLLOVER_PERIOD.getKey(), "1s"); + updateClusterSetting(ALERT_HISTORY_MAX_DOCS.getKey(), "1"); + + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String detectorId = responseBody.get("_id").toString(); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + String monitorId = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + + indexDoc(index, "1", randomDoc()); + + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); + + request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + hits = new ArrayList<>(); + + while (hits.size() == 0) { + hits = executeSearch(DetectorMonitorConfig.getAlertsIndex(randomDetectorType()), request); + } + + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getAlertsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + Map getAlertsBody = asMap(getAlertsResponse); + // TODO enable asserts here when able + Assert.assertEquals(1, getAlertsBody.get("total_alerts")); + String alertId = (String) ((ArrayList>) getAlertsBody.get("alerts")).get(0).get("id"); + String _detectorId = (String) ((ArrayList>) getAlertsBody.get("alerts")).get(0).get("detector_id"); + + // Ack alert to move it to history index + acknowledgeAlert(alertId, detectorId); + + List alertIndices = getAlertIndices(detector.getDetectorType()); + // alertIndex + 2 alertHistory indices + while(alertIndices.size() < 3) { + alertIndices = getAlertIndices(detector.getDetectorType()); + Thread.sleep(1000); + } + assertTrue("Did not find 3 alert indices", alertIndices.size() >= 3); + + // Index another doc to generate new alert in alertIndex + indexDoc(index, "2", randomDoc()); + + executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + executeResults = entityAsMap(executeResponse); + noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); + + client().performRequest(new Request("POST", DetectorMonitorConfig.getAlertsIndex(randomDetectorType()) + "/_refresh")); + + getAlertsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + getAlertsBody = asMap(getAlertsResponse); + // 1 from alertIndex and 1 from history index + Assert.assertEquals(2, getAlertsBody.get("total_alerts")); + + restoreAlertsFindingsIMSettings(); + } } \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/alerts/SecureAlertsRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/alerts/SecureAlertsRestApiIT.java new file mode 100644 index 000000000..ff6cba8bb --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/alerts/SecureAlertsRestApiIT.java @@ -0,0 +1,332 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.alerts; + +import org.apache.http.HttpHost; +import org.apache.http.HttpStatus; +import org.apache.http.entity.StringEntity; +import org.apache.http.message.BasicHeader; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.opensearch.client.Request; +import org.opensearch.client.Response; +import org.opensearch.client.ResponseException; +import org.opensearch.client.RestClient; +import org.opensearch.commons.alerting.model.action.Action; +import org.opensearch.commons.rest.SecureRestClientBuilder; +import org.opensearch.rest.RestStatus; +import org.opensearch.search.SearchHit; +import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; +import org.opensearch.securityanalytics.SecurityAnalyticsRestTestCase; +import org.opensearch.securityanalytics.config.monitors.DetectorMonitorConfig; +import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.model.DetectorInput; +import org.opensearch.securityanalytics.model.DetectorRule; +import org.opensearch.securityanalytics.model.DetectorTrigger; + +import java.io.IOException; +import java.util.*; +import java.util.stream.Collectors; + +import static org.opensearch.securityanalytics.TestHelpers.*; + +public class SecureAlertsRestApiIT extends SecurityAnalyticsRestTestCase { + + static String SECURITY_ANALYTICS_FULL_ACCESS_ROLE = "security_analytics_full_access"; + static String SECURITY_ANALYTICS_READ_ACCESS_ROLE = "security_analytics_read_access"; + static String TEST_HR_BACKEND_ROLE = "HR"; + static String TEST_IT_BACKEND_ROLE = "IT"; + private final String user = "userAlert"; + private static final String[] EMPTY_ARRAY = new String[0]; + private RestClient userClient; + + @Before + public void create() throws IOException { + String[] backendRoles = { TEST_HR_BACKEND_ROLE }; + createUserWithData(user, user, SECURITY_ANALYTICS_FULL_ACCESS_ROLE, backendRoles ); + if (userClient == null) { + userClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), user, user).setSocketTimeout(60000).build(); + } + } + + @After + public void cleanup() throws IOException { + userClient.close(); + deleteUser(user); + } + + @SuppressWarnings("unchecked") + public void testGetAlerts_byDetectorId_success() throws IOException { + try { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + // Assign a role to the index + createIndexRole(TEST_HR_ROLE, Collections.emptyList(), indexPermissions, List.of(index)); + String[] users = {user}; + // Assign a role to existing user + createUserRolesMapping(TEST_HR_ROLE, users); + + String rule = randomRule(); + + Response createResponse = makeRequest(userClient, "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", randomDetectorType()), + new StringEntity(rule), new BasicHeader("Content-Type", "application/json")); + Assert.assertEquals("Create rule failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String createdId = responseBody.get("_id").toString(); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response response = userClient.performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + createAlertingMonitorConfigIndex(null); + Action triggerAction = randomAction(createDestination()); + + Detector detector = randomDetectorWithInputsAndTriggers(List.of(new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(createdId)), + getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList()))), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(createdId), List.of(), List.of("attack.defense_evasion"), List.of(triggerAction)))); + + createResponse = makeRequest(userClient, "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + responseBody = asMap(createResponse); + + createdId = responseBody.get("_id").toString(); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + createdId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + String monitorId = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + + indexDoc(index, "1", randomDoc()); + + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(6, noOfSigmaRuleMatches); + + Assert.assertEquals(1, ((Map) executeResults.get("trigger_results")).values().size()); + + for (Map.Entry> triggerResult: ((Map>) executeResults.get("trigger_results")).entrySet()) { + Assert.assertEquals(1, ((Map) triggerResult.getValue().get("action_results")).values().size()); + + for (Map.Entry> alertActionResult: ((Map>) triggerResult.getValue().get("action_results")).entrySet()) { + Map actionResults = alertActionResult.getValue(); + + for (Map.Entry actionResult: actionResults.entrySet()) { + Map actionOutput = ((Map>) actionResult.getValue()).get("output"); + String expectedMessage = triggerAction.getSubjectTemplate().getIdOrCode().replace("{{ctx.detector.name}}", detector.getName()) + .replace("{{ctx.trigger.name}}", "test-trigger").replace("{{ctx.trigger.severity}}", "1"); + + Assert.assertEquals(expectedMessage, actionOutput.get("subject")); + Assert.assertEquals(expectedMessage, actionOutput.get("message")); + } + } + } + + request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + hits = new ArrayList<>(); + + while (hits.size() == 0) { + hits = executeSearch(DetectorMonitorConfig.getAlertsIndex(randomDetectorType()), request); + } + + // try to do get finding as a user with read access + String userRead = "userReadAlert"; + String[] backendRoles = { TEST_IT_BACKEND_ROLE }; + createUserWithData( userRead, userRead, SECURITY_ANALYTICS_READ_ACCESS_ROLE, backendRoles ); + RestClient userReadOnlyClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), userRead, userRead).setSocketTimeout(60000).build(); + + // Call GetAlerts API + Map params = new HashMap<>(); + params.put("detector_id", createdId); + Response getAlertsResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + Map getAlertsBody = asMap(getAlertsResponse); + Assert.assertEquals(1, getAlertsBody.get("total_alerts")); + + // Enable backend filtering and try to read finding as a user with no backend roles matching the user who created the detector + enableOrDisableFilterBy("true"); + try { + getAlertsResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + } catch (ResponseException e) + { + assertEquals("Get alert failed", RestStatus.FORBIDDEN, restStatus(e.getResponse())); + } + finally { + userReadOnlyClient.close(); + deleteUser(userRead); + } + + // recreate user with matching backend roles and try again + String[] newBackendRoles = { TEST_HR_BACKEND_ROLE }; + createUserWithData( userRead, userRead, SECURITY_ANALYTICS_READ_ACCESS_ROLE, newBackendRoles ); + userReadOnlyClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), userRead, userRead).setSocketTimeout(60000).build(); + getAlertsResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + getAlertsBody = asMap(getAlertsResponse); + Assert.assertEquals(1, getAlertsBody.get("total_alerts")); + userReadOnlyClient.close(); + + // update user with no backend roles and try again + createUser(userRead, userRead, EMPTY_ARRAY); + userReadOnlyClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), userRead, userRead).setSocketTimeout(60000).build(); + try { + getAlertsResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + } catch (ResponseException e) + { + assertEquals("Get alert failed", RestStatus.FORBIDDEN, restStatus(e.getResponse())); + } + finally { + userReadOnlyClient.close(); + deleteUser(userRead); + } + } finally { + tryDeletingRole(TEST_HR_ROLE); + } + + } + + + public void testGetAlerts_byDetectorType_success() throws IOException, InterruptedException { + try { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + // Assign a role to the index + createIndexRole(TEST_HR_ROLE, Collections.emptyList(), indexPermissions, List.of(index)); + String[] users = {user}; + // Assign a role to existing user + createUserRolesMapping(TEST_HR_ROLE, users); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response response = userClient.performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + + Response createResponse = makeRequest(userClient, "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String createdId = responseBody.get("_id").toString(); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + createdId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + String monitorId = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + + indexDoc(index, "1", randomDoc()); + + client().performRequest(new Request("POST", "_refresh")); + + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); + + request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + hits = new ArrayList<>(); + + while (hits.size() == 0) { + hits = executeSearch(DetectorMonitorConfig.getAlertsIndex(randomDetectorType()), request); + } + + // try to do get finding as a user with read access + String userRead = "userReadAlert"; + String[] backendRoles = { TEST_IT_BACKEND_ROLE }; + createUserWithData( userRead, userRead, SECURITY_ANALYTICS_READ_ACCESS_ROLE, backendRoles ); + RestClient userReadOnlyClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), userRead, userRead).setSocketTimeout(60000).build(); + + // Call GetAlerts API + Map params = new HashMap<>(); + params.put("detectorType", randomDetectorType()); + Response getAlertsResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + Map getAlertsBody = asMap(getAlertsResponse); + Assert.assertEquals(1, getAlertsBody.get("total_alerts")); + + // Enable backend filtering and try to read finding as a user with no backend roles matching the user who created the detector + enableOrDisableFilterBy("true"); + try { + getAlertsResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + } catch (ResponseException e) + { + assertEquals("Get alert failed", RestStatus.NOT_FOUND, restStatus(e.getResponse())); + } + finally { + userReadOnlyClient.close(); + deleteUser(userRead); + } + + // recreate user with matching backend roles and try again + String[] newBackendRoles = { TEST_HR_BACKEND_ROLE }; + createUserWithData( userRead, userRead, SECURITY_ANALYTICS_READ_ACCESS_ROLE, newBackendRoles ); + userReadOnlyClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), userRead, userRead).setSocketTimeout(60000).build(); + getAlertsResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + getAlertsBody = asMap(getAlertsResponse); + Assert.assertEquals(1, getAlertsBody.get("total_alerts")); + userReadOnlyClient.close(); + + // update user with no backend roles and try again + createUser(userRead, userRead, EMPTY_ARRAY); + userReadOnlyClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), userRead, userRead).setSocketTimeout(60000).build(); + try { + getAlertsResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + } catch (ResponseException e) + { + assertEquals("Get alert failed", RestStatus.FORBIDDEN, restStatus(e.getResponse())); + } + finally { + userReadOnlyClient.close(); + deleteUser(userRead); + } + } finally { + tryDeletingRole(TEST_HR_ROLE); + } + } + +} \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/correlation/CorrelationEngineRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/correlation/CorrelationEngineRestApiIT.java new file mode 100644 index 000000000..cb231c5b8 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/correlation/CorrelationEngineRestApiIT.java @@ -0,0 +1,374 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.correlation; + +import org.junit.Assert; +import org.opensearch.client.Request; +import org.opensearch.client.Response; +import org.opensearch.rest.RestStatus; +import org.opensearch.search.SearchHit; +import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; +import org.opensearch.securityanalytics.SecurityAnalyticsRestTestCase; +import org.opensearch.securityanalytics.model.CorrelationQuery; +import org.opensearch.securityanalytics.model.CorrelationRule; +import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.model.DetectorInput; +import org.opensearch.securityanalytics.model.DetectorRule; +import org.opensearch.securityanalytics.model.DetectorTrigger; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.opensearch.securityanalytics.TestHelpers.*; + +public class CorrelationEngineRestApiIT extends SecurityAnalyticsRestTestCase { + + @SuppressWarnings("unchecked") + public void testBasicCorrelationEngineWorkflow() throws IOException { + LogIndices indices = createIndices(); + + String vpcFlowMonitorId = createVpcFlowDetector(indices.vpcFlowsIndex); + String adLdapMonitorId = createAdLdapDetector(indices.adLdapLogsIndex); + String testWindowsMonitorId = createTestWindowsDetector(indices.windowsIndex); + String appLogsMonitorId = createAppLogsDetector(indices.appLogsIndex); + String s3MonitorId = createS3Detector(indices.s3AccessLogsIndex); + + String ruleId = createNetworkToAdLdapToWindowsRule(indices); + createWindowsToAppLogsToS3LogsRule(indices); + + indexDoc(indices.adLdapLogsIndex, "22", randomAdLdapDoc()); + Response executeResponse = executeAlertingMonitor(adLdapMonitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(0, noOfSigmaRuleMatches); + + indexDoc(indices.windowsIndex, "2", randomDoc()); + executeResponse = executeAlertingMonitor(testWindowsMonitorId, Collections.emptyMap()); + executeResults = entityAsMap(executeResponse); + noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); + + indexDoc(indices.appLogsIndex, "4", randomAppLogDoc()); + executeResponse = executeAlertingMonitor(appLogsMonitorId, Collections.emptyMap()); + executeResults = entityAsMap(executeResponse); + noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(0, noOfSigmaRuleMatches); + + indexDoc(indices.s3AccessLogsIndex, "5", randomS3AccessLogDoc()); + executeResponse = executeAlertingMonitor(s3MonitorId, Collections.emptyMap()); + executeResults = entityAsMap(executeResponse); + noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(0, noOfSigmaRuleMatches); + + indexDoc(indices.vpcFlowsIndex, "1", randomVpcFlowDoc()); + executeResponse = executeAlertingMonitor(vpcFlowMonitorId, Collections.emptyMap()); + executeResults = entityAsMap(executeResponse); + noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(1, noOfSigmaRuleMatches); + + // Call GetFindings API + Map params = new HashMap<>(); + params.put("detectorType", "test_windows"); + Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + String finding = ((List>) getFindingsBody.get("findings")).get(0).get("id").toString(); + + List> correlatedFindings = searchCorrelatedFindings(finding, "test_windows", 300000L, 10); + Assert.assertEquals(1, correlatedFindings.size()); + Assert.assertTrue(correlatedFindings.get(0).get("rules") instanceof List); + Assert.assertEquals(1, ((List) correlatedFindings.get(0).get("rules")).size()); + Assert.assertEquals(ruleId, ((List) correlatedFindings.get(0).get("rules")).get(0)); + } + + @SuppressWarnings("unchecked") + public void testListCorrelationsWorkflow() throws IOException, InterruptedException { + Long startTime = System.currentTimeMillis(); + LogIndices indices = createIndices(); + + String vpcFlowMonitorId = createVpcFlowDetector(indices.vpcFlowsIndex); + String testWindowsMonitorId = createTestWindowsDetector(indices.windowsIndex); + + createNetworkToAdLdapToWindowsRule(indices); + Thread.sleep(30000); + + indexDoc(indices.windowsIndex, "2", randomDoc()); + Response executeResponse = executeAlertingMonitor(testWindowsMonitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); + + Thread.sleep(30000); + + indexDoc(indices.vpcFlowsIndex, "1", randomVpcFlowDoc()); + executeResponse = executeAlertingMonitor(vpcFlowMonitorId, Collections.emptyMap()); + executeResults = entityAsMap(executeResponse); + noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(1, noOfSigmaRuleMatches); + + Thread.sleep(30000); + Long endTime = System.currentTimeMillis(); + + Request request = new Request("GET", "/_plugins/_security_analytics/correlations?start_timestamp=" + startTime + "&end_timestamp=" + endTime); + Response response = client().performRequest(request); + + Assert.assertEquals(200, response.getStatusLine().getStatusCode()); + Map responseMap = entityAsMap(response); + List results = (List) responseMap.get("findings"); + Assert.assertEquals(1, results.size()); + } + + private LogIndices createIndices() throws IOException { + LogIndices indices = new LogIndices(); + indices.adLdapLogsIndex = createTestIndex("ad_logs", adLdapLogMappings()); + indices.s3AccessLogsIndex = createTestIndex("s3_access_logs", s3AccessLogMappings()); + indices.appLogsIndex = createTestIndex("app_logs", appLogMappings()); + indices.windowsIndex = createTestIndex(randomIndex(), windowsIndexMapping()); + indices.vpcFlowsIndex = createTestIndex("vpc_flow", vpcFlowMappings()); + return indices; + } + + private String createNetworkToAdLdapToWindowsRule(LogIndices indices) throws IOException { + CorrelationQuery query1 = new CorrelationQuery(indices.vpcFlowsIndex, "dstaddr:4.5.6.7", "network"); + CorrelationQuery query2 = new CorrelationQuery(indices.adLdapLogsIndex, "ResultType:50126", "ad_ldap"); + CorrelationQuery query4 = new CorrelationQuery(indices.windowsIndex, "Domain:NTAUTHORI*", "test_windows"); + + CorrelationRule rule = new CorrelationRule(CorrelationRule.NO_ID, CorrelationRule.NO_VERSION, "network to ad_ldap to windows", List.of(query1, query2, query4)); + Request request = new Request("POST", "/_plugins/_security_analytics/correlation/rules"); + request.setJsonEntity(toJsonString(rule)); + Response response = client().performRequest(request); + + Assert.assertEquals(201, response.getStatusLine().getStatusCode()); + return entityAsMap(response).get("_id").toString(); + } + + private String createWindowsToAppLogsToS3LogsRule(LogIndices indices) throws IOException { + CorrelationQuery query1 = new CorrelationQuery(indices.windowsIndex, "HostName:EC2AMAZ-EPO7HKA", "test_windows"); + CorrelationQuery query2 = new CorrelationQuery(indices.appLogsIndex, "endpoint:\\/customer_records.txt", "ad_ldap"); + CorrelationQuery query4 = new CorrelationQuery(indices.s3AccessLogsIndex, "aws.cloudtrail.eventName:ReplicateObject", "s3"); + + CorrelationRule rule = new CorrelationRule(CorrelationRule.NO_ID, CorrelationRule.NO_VERSION, "windows to app_logs to s3 logs", List.of(query1, query2, query4)); + Request request = new Request("POST", "/_plugins/_security_analytics/correlation/rules"); + request.setJsonEntity(toJsonString(rule)); + Response response = client().performRequest(request); + + Assert.assertEquals(201, response.getStatusLine().getStatusCode()); + return entityAsMap(response).get("_id").toString(); + } + + @SuppressWarnings("unchecked") + private String createVpcFlowDetector(String indexName) throws IOException { + Detector vpcFlowDetector = randomDetectorWithInputsAndTriggersAndType(List.of(new DetectorInput("vpc flow detector for security analytics", List.of(indexName), List.of(), + getPrePackagedRules("network").stream().map(DetectorRule::new).collect(Collectors.toList()))), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("network"), List.of(), List.of(), List.of(), List.of())), Detector.DetectorType.NETWORK); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(vpcFlowDetector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String createdId = responseBody.get("_id").toString(); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + createdId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + return ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + } + + @SuppressWarnings("unchecked") + private String createAdLdapDetector(String indexName) throws IOException { + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{\n" + + " \"index_name\": \"" + indexName + "\",\n" + + " \"rule_topic\": \"ad_ldap\",\n" + + " \"partial\": true,\n" + + " \"alias_mappings\": {\n" + + " \"properties\": {\n" + + " \"azure-signinlogs-properties-user_id\": {\n" + + " \"path\": \"azure.signinlogs.props.user_id\",\n" + + " \"type\": \"alias\"\n" + + " },\n" + + " \"azure-platformlogs-result_type\": {\n" + + " \"path\": \"azure.platformlogs.result_type\",\n" + + " \"type\": \"alias\"\n" + + " },\n" + + " \"azure-signinlogs-result_description\": {\n" + + " \"path\": \"azure.signinlogs.result_description\",\n" + + " \"type\": \"alias\"\n" + + " },\n" + + " \"timestamp\": {\n" + + " \"path\": \"creationTime\",\n" + + " \"type\": \"alias\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}" + ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(RestStatus.OK.getStatus(), response.getStatusLine().getStatusCode()); + + Detector adLdapDetector = randomDetectorWithInputsAndTriggersAndType(List.of(new DetectorInput("ad_ldap logs detector for security analytics", List.of(indexName), List.of(), + getPrePackagedRules("ad_ldap").stream().map(DetectorRule::new).collect(Collectors.toList()))), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("ad_ldap"), List.of(), List.of(), List.of(), List.of())), Detector.DetectorType.AD_LDAP); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(adLdapDetector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String createdId = responseBody.get("_id").toString(); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + createdId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + return ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + } + + @SuppressWarnings("unchecked") + private String createTestWindowsDetector(String indexName) throws IOException { + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + indexName + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(RestStatus.OK.getStatus(), response.getStatusLine().getStatusCode()); + + Detector windowsDetector = randomDetectorWithInputsAndTriggers(List.of(new DetectorInput("windows detector for security analytics", List.of(indexName), List.of(), + getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList()))), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(windowsDetector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String createdId = responseBody.get("_id").toString(); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + createdId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + return ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + } + + @SuppressWarnings("unchecked") + private String createAppLogsDetector(String indexName) throws IOException { + Detector appLogsDetector = randomDetectorWithInputsAndTriggersAndType(List.of(new DetectorInput("app logs detector for security analytics", List.of(indexName), List.of(), + getPrePackagedRules("others_application").stream().map(DetectorRule::new).collect(Collectors.toList()))), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("others_application"), List.of(), List.of(), List.of(), List.of())), Detector.DetectorType.OTHERS_APPLICATION); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(appLogsDetector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String createdId = responseBody.get("_id").toString(); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + createdId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + return ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + } + + @SuppressWarnings("unchecked") + private String createS3Detector(String indexName) throws IOException { + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{\n" + + " \"index_name\": \"s3_access_logs\",\n" + + " \"rule_topic\": \"s3\",\n" + + " \"partial\": true,\n" + + " \"alias_mappings\": {\n" + + " \"properties\": {\n" + + " \"aws-cloudtrail-event_source\": {\n" + + " \"type\": \"alias\",\n" + + " \"path\": \"aws.cloudtrail.event_source\"\n" + + " },\n" + + " \"aws.cloudtrail.event_name\": {\n" + + " \"type\": \"alias\",\n" + + " \"path\": \"aws.cloudtrail.event_name\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}" + ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(RestStatus.OK.getStatus(), response.getStatusLine().getStatusCode()); + + Detector s3AccessLogsDetector = randomDetectorWithInputsAndTriggersAndType(List.of(new DetectorInput("s3 access logs detector for security analytics", List.of(indexName), List.of(), + getPrePackagedRules("s3").stream().map(DetectorRule::new).collect(Collectors.toList()))), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("s3"), List.of(), List.of(), List.of(), List.of())), Detector.DetectorType.S3); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(s3AccessLogsDetector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String createdId = responseBody.get("_id").toString(); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + createdId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + return ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + } + + static class LogIndices { + String vpcFlowsIndex; + String adLdapLogsIndex; + String windowsIndex; + String appLogsIndex; + String s3AccessLogsIndex; + } +} \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/correlation/CorrelationEngineRuleRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/correlation/CorrelationEngineRuleRestApiIT.java new file mode 100644 index 000000000..4fa583381 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/correlation/CorrelationEngineRuleRestApiIT.java @@ -0,0 +1,36 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.correlation; + +import org.junit.Assert; +import org.opensearch.client.Response; +import org.opensearch.client.ResponseException; +import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; +import org.opensearch.securityanalytics.SecurityAnalyticsRestTestCase; +import org.opensearch.securityanalytics.model.CorrelationRule; + +import java.io.IOException; +import java.util.Collections; + +import static org.opensearch.securityanalytics.TestHelpers.randomCorrelationRule; + +public class CorrelationEngineRuleRestApiIT extends SecurityAnalyticsRestTestCase { + + public void testCreateCorrelationRule() throws IOException { + CorrelationRule rule = randomCorrelationRule("custom-rule"); + Response response = makeRequest(client(), "POST", SecurityAnalyticsPlugin.CORRELATION_RULES_BASE_URI, Collections.emptyMap(), toHttpEntity(rule)); + Assert.assertEquals(201, response.getStatusLine().getStatusCode()); + } + + public void testCreateCorrelationRuleWithInvalidName() { + CorrelationRule rule = randomCorrelationRule(""); + Exception exception = assertThrows(ResponseException.class, () -> { + makeRequest(client(), "POST", SecurityAnalyticsPlugin.CORRELATION_RULES_BASE_URI, Collections.emptyMap(), toHttpEntity(rule)); + }); + String expectedMessage = "{\"error\":{\"root_cause\":[{\"type\":\"action_request_validation_exception\",\"reason\":\"Validation Failed: \"}],\"type\":\"action_request_validation_exception\",\"reason\":\"Validation Failed: \"},\"status\":400}"; + String actualMessage = exception.getMessage(); + Assert.assertTrue(actualMessage.contains(expectedMessage)); + } +} \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/correlation/LuceneEngineIT.java b/src/test/java/org/opensearch/securityanalytics/correlation/LuceneEngineIT.java new file mode 100644 index 000000000..9848082b7 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/correlation/LuceneEngineIT.java @@ -0,0 +1,111 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.correlation; + +import org.apache.lucene.index.VectorSimilarityFunction; +import org.junit.Assert; +import org.opensearch.client.Response; +import org.opensearch.common.Strings; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.securityanalytics.SecurityAnalyticsRestTestCase; +import org.opensearch.securityanalytics.correlation.index.CorrelationParamsContext; +import org.opensearch.securityanalytics.correlation.index.query.CorrelationQueryBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +public class LuceneEngineIT extends SecurityAnalyticsRestTestCase { + + private static final int DIMENSION = 3; + private static final String PROPERTIES_FIELD_NAME = "properties"; + private static final String TYPE_FIELD_NAME = "type"; + private static final String SECURITY_ANALYTICS_VECTOR_TYPE = "sa_vector"; + private static final String DIMENSION_FIELD_NAME = "dimension"; + private static final int M = 16; + private static final int EF_CONSTRUCTION = 128; + private static final String INDEX_NAME = "test-index-1"; + private static final Float[][] TEST_VECTORS = new Float[][]{{ 1.0f, 1.0f, 1.0f }, { 2.0f, 2.0f, 2.0f }, { 3.0f, 3.0f, 3.0f }}; + private static final float[][] TEST_QUERY_VECTORS = new float[][]{ { 1.0f, 1.0f, 1.0f }, { 2.0f, 2.0f, 2.0f }, { 3.0f, 3.0f, 3.0f } }; + private static final Map> VECTOR_SIMILARITY_TO_SCORE = Map.of( + VectorSimilarityFunction.EUCLIDEAN, + (similarity) -> 1 / (1 + similarity), + VectorSimilarityFunction.DOT_PRODUCT, + (similarity) -> (1 + similarity) / 2, + VectorSimilarityFunction.COSINE, + (similarity) -> (1 + similarity) / 2 + ); + + @SuppressWarnings("unchecked") + public void testQuery() throws IOException { + String textField = "text-field"; + String luceneField = "lucene-field"; + XContentBuilder builder = XContentFactory.jsonBuilder() + .startObject() + .startObject(PROPERTIES_FIELD_NAME) + .startObject(textField) + .field(TYPE_FIELD_NAME, "text") + .endObject() + .startObject(luceneField) + .field(TYPE_FIELD_NAME, SECURITY_ANALYTICS_VECTOR_TYPE) + .field(DIMENSION_FIELD_NAME, DIMENSION) + .startObject(CorrelationConstants.CORRELATION_CONTEXT) + .field(CorrelationParamsContext.VECTOR_SIMILARITY_FUNCTION, VectorSimilarityFunction.EUCLIDEAN.name()) + .startObject(CorrelationParamsContext.PARAMETERS) + .field(CorrelationConstants.METHOD_PARAMETER_M, M) + .field(CorrelationConstants.METHOD_PARAMETER_EF_CONSTRUCTION, EF_CONSTRUCTION) + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + + String mapping = Strings.toString(builder); + createTestIndexWithMappingJson(client(), INDEX_NAME, mapping, getCorrelationDefaultIndexSettings()); + + for (int idx = 0; idx < TEST_VECTORS.length; ++idx) { + addCorrelationDoc(INDEX_NAME, + String.valueOf(idx+1), + List.of(textField, luceneField), + List.of(java.util.UUID.randomUUID().toString(), TEST_VECTORS[idx])); + } + refreshAllIndices(); + Assert.assertEquals(TEST_VECTORS.length, getDocCount(INDEX_NAME)); + + int k = 2; + for (float[] query: TEST_QUERY_VECTORS) { + Response response = searchCorrelationIndex(INDEX_NAME, new CorrelationQueryBuilder(luceneField, query, k), k); + Map responseBody = entityAsMap(response); + Assert.assertEquals(2, ((List) ((Map) responseBody.get("hits")).get("hits")).size()); + @SuppressWarnings("unchecked") + double actualScore1 = Double.parseDouble(((List>) ((Map) responseBody.get("hits")).get("hits")).get(0).get("_score").toString()); + @SuppressWarnings("unchecked") + double actualScore2 = Double.parseDouble(((List>) ((Map) responseBody.get("hits")).get("hits")).get(1).get("_score").toString()); + @SuppressWarnings("unchecked") + List hit1 = ((Map>) ((List>) ((Map) responseBody.get("hits")).get("hits")).get(0).get("_source")).get(luceneField).stream() + .map(Double::floatValue).collect(Collectors.toList()); + float[] resultVector1 = new float[hit1.size()]; + for (int i = 0; i < hit1.size(); ++i) { + resultVector1[i] = hit1.get(i); + } + + @SuppressWarnings("unchecked") + List hit2 = ((Map>) ((List>) ((Map) responseBody.get("hits")).get("hits")).get(1).get("_source")).get(luceneField).stream() + .map(Double::floatValue).collect(Collectors.toList()); + float[] resultVector2 = new float[hit2.size()]; + for (int i = 0; i < hit2.size(); ++i) { + resultVector2[i] = hit2.get(i); + } + + double rawScore1 = VectorSimilarityFunction.EUCLIDEAN.compare(resultVector1, query); + Assert.assertEquals(rawScore1, actualScore1, 0.0001); + double rawScore2 = VectorSimilarityFunction.EUCLIDEAN.compare(resultVector2, query); + Assert.assertEquals(rawScore2, actualScore2, 0.0001); + } + } +} \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/findings/FindingDtoTests.java b/src/test/java/org/opensearch/securityanalytics/findings/FindingDtoTests.java new file mode 100644 index 000000000..7877410be --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/findings/FindingDtoTests.java @@ -0,0 +1,44 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.findings; + +import java.time.Instant; +import java.util.List; +import org.opensearch.commons.alerting.model.DocLevelQuery; +import org.opensearch.commons.alerting.model.FindingDocument; +import org.opensearch.securityanalytics.action.FindingDto; +import org.opensearch.test.OpenSearchTestCase; + +public class FindingDtoTests extends OpenSearchTestCase { + + + public void testFindingDTO_creation() { + + FindingDocument findingDocument1 = new FindingDocument("test_index1", "doc1", true, "document 1 payload"); + FindingDocument findingDocument2 = new FindingDocument("test_index1", "doc2", true, "document 2 payload"); + FindingDocument findingDocument3 = new FindingDocument("test_index1", "doc3", true, "document 3 payload"); + + Instant now = Instant.now(); + + FindingDto findingDto = new FindingDto( + "detectorId", + "findingId", + List.of("doc1", "doc2", "doc3"), + "my_index", + List.of(new DocLevelQuery("1","myQuery","fieldA:valABC", List.of())), + now, + List.of(findingDocument1, findingDocument2, findingDocument3) + ); + + assertEquals("detectorId", findingDto.getDetectorId()); + assertEquals("findingId", findingDto.getId()); + assertEquals(List.of("doc1", "doc2", "doc3"), findingDto.getRelatedDocIds()); + assertEquals("my_index", findingDto.getIndex()); + assertEquals(List.of(new DocLevelQuery("1","myQuery","fieldA:valABC", List.of())), findingDto.getDocLevelQueries()); + assertEquals(now, findingDto.getTimestamp()); + assertEquals(List.of(findingDocument1, findingDocument2, findingDocument3), findingDto.getDocuments()); + } + +} diff --git a/src/test/java/org/opensearch/securityanalytics/findings/FindingIT.java b/src/test/java/org/opensearch/securityanalytics/findings/FindingIT.java index c019363dd..daac0bb87 100644 --- a/src/test/java/org/opensearch/securityanalytics/findings/FindingIT.java +++ b/src/test/java/org/opensearch/securityanalytics/findings/FindingIT.java @@ -10,12 +10,12 @@ import java.util.HashMap; import java.util.List; import java.util.Map; - import java.util.stream.Collectors; import org.apache.http.HttpStatus; import org.junit.Assert; import org.opensearch.client.Request; import org.opensearch.client.Response; +import org.opensearch.client.ResponseException; import org.opensearch.rest.RestStatus; import org.opensearch.search.SearchHit; import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; @@ -26,10 +26,15 @@ import org.opensearch.securityanalytics.model.DetectorTrigger; import static org.opensearch.securityanalytics.TestHelpers.netFlowMappings; +import static org.opensearch.securityanalytics.TestHelpers.randomDetectorType; import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithTriggers; import static org.opensearch.securityanalytics.TestHelpers.randomDoc; import static org.opensearch.securityanalytics.TestHelpers.randomIndex; import static org.opensearch.securityanalytics.TestHelpers.windowsIndexMapping; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.FINDING_HISTORY_INDEX_MAX_AGE; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.FINDING_HISTORY_MAX_DOCS; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.FINDING_HISTORY_RETENTION_PERIOD; +import static org.opensearch.securityanalytics.settings.SecurityAnalyticsSettings.FINDING_HISTORY_ROLLOVER_PERIOD; public class FindingIT extends SecurityAnalyticsRestTestCase { @@ -42,7 +47,7 @@ public void testGetFindings_byDetectorId_success() throws IOException { // both req params and req body are supported createMappingRequest.setJsonEntity( "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"windows\", " + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + " \"partial\":true" + "}" ); @@ -50,7 +55,7 @@ public void testGetFindings_byDetectorId_success() throws IOException { Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("windows"), List.of(), List.of(), List.of(), List.of()))); + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -86,6 +91,16 @@ public void testGetFindings_byDetectorId_success() throws IOException { Assert.assertEquals(1, getFindingsBody.get("total_findings")); } + public void testGetFindings_noDetector_failure() throws IOException { + Map params = new HashMap<>(); + params.put("detector_id", "nonexistent_id"); + try { + makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + } catch (ResponseException e) { + assertEquals(HttpStatus.SC_NOT_FOUND, e.getResponse().getStatusLine().getStatusCode()); + } + } + public void testGetFindings_byDetectorType_oneDetector_success() throws IOException { String index = createTestIndex(randomIndex(), windowsIndexMapping()); @@ -94,7 +109,7 @@ public void testGetFindings_byDetectorType_oneDetector_success() throws IOExcept // both req params and req body are supported createMappingRequest.setJsonEntity( "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"windows\", " + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + " \"partial\":true" + "}" ); @@ -102,7 +117,7 @@ public void testGetFindings_byDetectorType_oneDetector_success() throws IOExcept Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("windows"), List.of(), List.of(), List.of(), List.of()))); + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -132,7 +147,7 @@ public void testGetFindings_byDetectorType_oneDetector_success() throws IOExcept Assert.assertEquals(5, noOfSigmaRuleMatches); // Call GetFindings API Map params = new HashMap<>(); - params.put("detectorType", detector.getDetectorType().toUpperCase()); + params.put("detectorType", detector.getDetectorType()); Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); Map getFindingsBody = entityAsMap(getFindingsResponse); Assert.assertEquals(1, getFindingsBody.get("total_findings")); @@ -146,10 +161,14 @@ public void testGetFindings_byDetectorType_success() throws IOException { // both req params and req body are supported createMappingRequest.setJsonEntity( "{ \"index_name\":\"" + index1 + "\"," + - " \"rule_topic\":\"windows\", " + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + " \"partial\":true" + "}" ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + // index 2 String index2 = createTestIndex("netflow_test", netFlowMappings()); @@ -163,10 +182,10 @@ public void testGetFindings_byDetectorType_success() throws IOException { "}" ); - Response response = client().performRequest(createMappingRequest); + response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); // Detector 1 - WINDOWS - Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("windows"), List.of(), List.of(), List.of(), List.of()))); + Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector1)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -219,7 +238,7 @@ public void testGetFindings_byDetectorType_success() throws IOException { Map executeResults = entityAsMap(executeResponse); int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); - Assert.assertEquals(3, noOfSigmaRuleMatches); + Assert.assertEquals(5, noOfSigmaRuleMatches); // execute monitor 2 executeResponse = executeAlertingMonitor(monitorId2, Collections.emptyMap()); @@ -232,15 +251,237 @@ public void testGetFindings_byDetectorType_success() throws IOException { // Call GetFindings API for first detector Map params = new HashMap<>(); - params.put("detectorType", detector1.getDetectorType().toUpperCase()); + params.put("detectorType", detector1.getDetectorType()); Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); Map getFindingsBody = entityAsMap(getFindingsResponse); Assert.assertEquals(1, getFindingsBody.get("total_findings")); // Call GetFindings API for second detector params.clear(); - params.put("detectorType", detector2.getDetectorType().toUpperCase()); + params.put("detectorType", detector2.getDetectorType()); + getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + getFindingsBody = entityAsMap(getFindingsResponse); + Assert.assertEquals(1, getFindingsBody.get("total_findings")); + } + + public void testGetFindings_rolloverByMaxAge_success() throws IOException, InterruptedException { + + updateClusterSetting(FINDING_HISTORY_ROLLOVER_PERIOD.getKey(), "1s"); + updateClusterSetting(FINDING_HISTORY_INDEX_MAX_AGE.getKey(), "1s"); + + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String detectorId = responseBody.get("_id").toString(); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + String monitorId = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + + // Execute monitor first time to create findings index/alias + indexDoc(index, "1", randomDoc()); + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + + // Wait for findings index to rollover first, to make sure that our rollover applied correct settings/mappings + List findingIndices = getFindingIndices(detector.getDetectorType()); + while(findingIndices.size() < 2) { + findingIndices = getFindingIndices(detector.getDetectorType()); + Thread.sleep(1000); + } + assertTrue("Did not find more then 2 finding indices", findingIndices.size() >= 2); + + // Execute monitor second time to insert finding in new rollover'd index + indexDoc(index, "2", randomDoc()); + executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); + // Call GetFindings API + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + Assert.assertEquals(2, getFindingsBody.get("total_findings")); + + restoreAlertsFindingsIMSettings(); + } + + public void testGetFindings_rolloverByMaxDoc_success() throws IOException, InterruptedException { + + updateClusterSetting(FINDING_HISTORY_ROLLOVER_PERIOD.getKey(), "1s"); + updateClusterSetting(FINDING_HISTORY_MAX_DOCS.getKey(), "1"); + + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String detectorId = responseBody.get("_id").toString(); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + String monitorId = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + + indexDoc(index, "1", randomDoc()); + + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); + // Call GetFindings API + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + Assert.assertEquals(1, getFindingsBody.get("total_findings")); + + List findingIndices = getFindingIndices(detector.getDetectorType()); + while(findingIndices.size() < 2) { + findingIndices = getFindingIndices(detector.getDetectorType()); + Thread.sleep(1000); + } + assertTrue("Did not find 3 alert indices", findingIndices.size() >= 2); + + restoreAlertsFindingsIMSettings(); + } + + public void testGetFindings_rolloverByMaxDoc_short_retention_success() throws IOException, InterruptedException { + updateClusterSetting(FINDING_HISTORY_ROLLOVER_PERIOD.getKey(), "1s"); + updateClusterSetting(FINDING_HISTORY_MAX_DOCS.getKey(), "1"); + + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String detectorId = responseBody.get("_id").toString(); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + String monitorId = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + + indexDoc(index, "1", randomDoc()); + + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); + // Call GetFindings API + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + client().performRequest(new Request("POST", "_refresh")); + Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + Assert.assertEquals(1, getFindingsBody.get("total_findings")); + + List findingIndices = getFindingIndices(detector.getDetectorType()); + while(findingIndices.size() < 2) { + findingIndices = getFindingIndices(detector.getDetectorType()); + Thread.sleep(1000); + } + assertTrue("Did not find 3 findings indices", findingIndices.size() >= 2); + + updateClusterSetting(FINDING_HISTORY_RETENTION_PERIOD.getKey(), "1s"); + updateClusterSetting(FINDING_HISTORY_MAX_DOCS.getKey(), "1000"); + while(findingIndices.size() != 1) { + findingIndices = getFindingIndices(detector.getDetectorType()); + Thread.sleep(1000); + } + + assertTrue("Found finding indices but expected none", findingIndices.size() == 1); + + // Exec monitor again to make sure that current + indexDoc(index, "2", randomDoc()); + + executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + executeResults = entityAsMap(executeResponse); + + noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); + client().performRequest(new Request("POST", "_refresh")); getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); getFindingsBody = entityAsMap(getFindingsResponse); Assert.assertEquals(1, getFindingsBody.get("total_findings")); + + restoreAlertsFindingsIMSettings(); } } diff --git a/src/test/java/org/opensearch/securityanalytics/findings/FindingServiceTests.java b/src/test/java/org/opensearch/securityanalytics/findings/FindingServiceTests.java index c5c0cb425..993a13321 100644 --- a/src/test/java/org/opensearch/securityanalytics/findings/FindingServiceTests.java +++ b/src/test/java/org/opensearch/securityanalytics/findings/FindingServiceTests.java @@ -8,8 +8,10 @@ import java.time.Instant; import java.time.ZoneId; import java.util.ArrayDeque; +import java.util.Collections; import java.util.List; import java.util.Queue; +import java.util.stream.Collectors; import org.opensearch.action.ActionListener; import org.opensearch.client.Client; import org.opensearch.commons.alerting.model.CronSchedule; @@ -61,7 +63,8 @@ public void testGetFindings_success() { DetectorMonitorConfig.getAlertsIndex(Detector.DetectorType.OTHERS_APPLICATION.getDetectorType()), null, null, - DetectorMonitorConfig.getFindingsIndex(Detector.DetectorType.OTHERS_APPLICATION.getDetectorType()) + DetectorMonitorConfig.getFindingsIndex(Detector.DetectorType.OTHERS_APPLICATION.getDetectorType()), + Collections.emptyMap() ); GetDetectorResponse getDetectorResponse = new GetDetectorResponse("detector_id123", 1L, RestStatus.OK, detector); @@ -76,6 +79,7 @@ public void testGetFindings_success() { Finding finding1 = new Finding( "1", List.of("doc1", "doc2", "doc3"), + List.of("doc1", "doc2", "doc3"), "monitor_id1", "monitor_name1", "test_index1", @@ -90,6 +94,7 @@ public void testGetFindings_success() { Finding finding2 = new Finding( "1", List.of("doc21", "doc22"), + List.of("doc21", "doc22"), "monitor_id2", "monitor_name2", "test_index2", @@ -176,7 +181,8 @@ public void testGetFindings_getFindingsByMonitorIdFailure() { DetectorMonitorConfig.getAlertsIndex(Detector.DetectorType.OTHERS_APPLICATION.getDetectorType()), null, null, - DetectorMonitorConfig.getFindingsIndex(Detector.DetectorType.OTHERS_APPLICATION.getDetectorType()) + DetectorMonitorConfig.getFindingsIndex(Detector.DetectorType.OTHERS_APPLICATION.getDetectorType()), + Collections.emptyMap() ); GetDetectorResponse getDetectorResponse = new GetDetectorResponse("detector_id123", 1L, RestStatus.OK, detector); diff --git a/src/test/java/org/opensearch/securityanalytics/findings/SecureFindingRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/findings/SecureFindingRestApiIT.java new file mode 100644 index 000000000..17add4956 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/findings/SecureFindingRestApiIT.java @@ -0,0 +1,334 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.securityanalytics.findings; + +import org.apache.http.HttpHost; +import org.apache.http.HttpStatus; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.opensearch.client.Request; +import org.opensearch.client.Response; +import org.opensearch.client.ResponseException; +import org.opensearch.client.RestClient; +import org.opensearch.commons.rest.SecureRestClientBuilder; +import org.opensearch.rest.RestStatus; +import org.opensearch.search.SearchHit; +import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; +import org.opensearch.securityanalytics.SecurityAnalyticsRestTestCase; +import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.model.DetectorInput; +import org.opensearch.securityanalytics.model.DetectorRule; +import org.opensearch.securityanalytics.model.DetectorTrigger; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.opensearch.securityanalytics.TestHelpers.netFlowMappings; +import static org.opensearch.securityanalytics.TestHelpers.randomDetectorType; +import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithTriggers; +import static org.opensearch.securityanalytics.TestHelpers.randomDoc; +import static org.opensearch.securityanalytics.TestHelpers.randomIndex; +import static org.opensearch.securityanalytics.TestHelpers.windowsIndexMapping; + +public class SecureFindingRestApiIT extends SecurityAnalyticsRestTestCase { + + static String SECURITY_ANALYTICS_FULL_ACCESS_ROLE = "security_analytics_full_access"; + static String SECURITY_ANALYTICS_READ_ACCESS_ROLE = "security_analytics_read_access"; + static String TEST_HR_BACKEND_ROLE = "HR"; + static String TEST_IT_BACKEND_ROLE = "IT"; + private final String user = "userFinding"; + private static final String[] EMPTY_ARRAY = new String[0]; + private RestClient userClient; + + + @Before + public void create() throws IOException { + String[] backendRoles = { TEST_HR_BACKEND_ROLE }; + createUserWithData(user, user, SECURITY_ANALYTICS_FULL_ACCESS_ROLE, backendRoles ); + if (userClient == null) { + userClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), user, user).setSocketTimeout(60000).build(); + } + } + + @After + public void cleanup() throws IOException { + userClient.close(); + deleteUser(user); + } + + @SuppressWarnings("unchecked") + public void testGetFindings_byDetectorId_success() throws IOException { + try { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + // Assign a role to the index + createIndexRole(TEST_HR_ROLE, Collections.emptyList(), indexPermissions, List.of(index)); + String[] users = {user}; + // Assign a role to existing user + createUserRolesMapping(TEST_HR_ROLE, users); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response response = userClient.performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + + Response createResponse = makeRequest(userClient, "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String createdId = responseBody.get("_id").toString(); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + createdId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + String monitorId = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + + indexDoc(index, "1", randomDoc()); + + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); + + // try to do get finding as a user with read access + String userRead = "userReadFinding"; + String[] backendRoles = { TEST_IT_BACKEND_ROLE }; + createUserWithData( userRead, userRead, SECURITY_ANALYTICS_READ_ACCESS_ROLE, backendRoles ); + RestClient userReadOnlyClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), userRead, userRead).setSocketTimeout(60000).build(); + // Call GetFindings API + Map params = new HashMap<>(); + params.put("detector_id", createdId); + Response getFindingsResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + Assert.assertEquals(1, getFindingsBody.get("total_findings")); + + // Enable backend filtering and try to read finding as a user with no backend roles matching the user who created the detector + enableOrDisableFilterBy("true"); + try { + getFindingsResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + } catch (ResponseException e) + { + assertEquals("Get finding failed", RestStatus.FORBIDDEN, restStatus(e.getResponse())); + } + finally { + userReadOnlyClient.close(); + deleteUser(userRead); + } + + // recreate user with matching backend roles and try again + String[] newBackendRoles = { TEST_HR_BACKEND_ROLE }; + createUserWithData( userRead, userRead, SECURITY_ANALYTICS_READ_ACCESS_ROLE, newBackendRoles ); + userReadOnlyClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), userRead, userRead).setSocketTimeout(60000).build(); + getFindingsResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + getFindingsBody = entityAsMap(getFindingsResponse); + Assert.assertEquals(1, getFindingsBody.get("total_findings")); + userReadOnlyClient.close(); + + // update user with no backend roles and try again + createUser(userRead, userRead, EMPTY_ARRAY); + userReadOnlyClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), userRead, userRead).setSocketTimeout(60000).build(); + try { + getFindingsResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + } catch (ResponseException e) + { + assertEquals("Get finding failed", RestStatus.FORBIDDEN, restStatus(e.getResponse())); + } + finally { + userReadOnlyClient.close(); + deleteUser(userRead); + } + + } finally { + tryDeletingRole(TEST_HR_ROLE); + } + } + + public void testGetFindings_byDetectorType_success() throws IOException { + try { + String index1 = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index1 + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + Response response = userClient.performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + // index 2 + String index2 = createTestIndex("netflow_test", netFlowMappings()); + + // Execute CreateMappingsAction to add alias mapping for index + createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index2 + "\"," + + " \"rule_topic\":\"netflow\", " + + " \"partial\":true" + + "}" + ); + + response = userClient.performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + createIndexRole(TEST_HR_ROLE, Collections.emptyList(), indexPermissions, List.of(index1, index2)); + String[] users = {user}; + createUserRolesMapping(TEST_HR_ROLE, users); + + // Detector 1 - WINDOWS + Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + Response createResponse = makeRequest(userClient, "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector1)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String createdId = responseBody.get("_id").toString(); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + createdId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + String monitorId1 = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + // Detector 2 - NETWORK + DetectorInput inputNetflow = new DetectorInput("windows detector for security analytics", List.of("netflow_test"), Collections.emptyList(), + getPrePackagedRules("network").stream().map(DetectorRule::new).collect(Collectors.toList())); + Detector detector2 = randomDetectorWithTriggers( + getPrePackagedRules("network"), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("network"), List.of(), List.of(), List.of(), List.of())), + Detector.DetectorType.NETWORK, + inputNetflow + ); + + createResponse = makeRequest(userClient, "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector2)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + responseBody = asMap(createResponse); + + createdId = responseBody.get("_id").toString(); + + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + createdId + "\"\n" + + " }\n" + + " }\n" + + "}"; + hits = executeSearch(Detector.DETECTORS_INDEX, request); + hit = hits.get(0); + String monitorId2 = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + + indexDoc(index1, "1", randomDoc()); + indexDoc(index2, "1", randomDoc()); + // execute monitor 1 + Response executeResponse = executeAlertingMonitor(monitorId1, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); + + // execute monitor 2 + executeResponse = executeAlertingMonitor(monitorId2, Collections.emptyMap()); + executeResults = entityAsMap(executeResponse); + + noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(1, noOfSigmaRuleMatches); + + client().performRequest(new Request("POST", "_refresh")); + + + // try to do get finding as a user with read access + String userRead = "userReadFinding"; + String[] backendRoles = { TEST_IT_BACKEND_ROLE }; + createUserWithData( userRead, userRead, SECURITY_ANALYTICS_READ_ACCESS_ROLE, backendRoles ); + RestClient userReadOnlyClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), userRead, userRead).setSocketTimeout(60000).build(); + + + // Call GetFindings API for first detector + Map params = new HashMap<>(); + params.put("detectorType", detector1.getDetectorType()); + Response getFindingsResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + Assert.assertEquals(1, getFindingsBody.get("total_findings")); + // Call GetFindings API for second detector + params.clear(); + params.put("detectorType", detector2.getDetectorType()); + getFindingsResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + getFindingsBody = entityAsMap(getFindingsResponse); + Assert.assertEquals(1, getFindingsBody.get("total_findings")); + + // Enable backend filtering and try to read finding as a user with no backend roles matching the user who created the detector + enableOrDisableFilterBy("true"); + try { + getFindingsResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + } catch (ResponseException e) + { + assertEquals("Get finding failed", RestStatus.NOT_FOUND, restStatus(e.getResponse())); + } + finally { + userReadOnlyClient.close(); + deleteUser(userRead); + } + + // recreate user with matching backend roles and try again + String[] newBackendRoles = { TEST_HR_BACKEND_ROLE }; + createUserWithData( userRead, userRead, SECURITY_ANALYTICS_READ_ACCESS_ROLE, newBackendRoles ); + userReadOnlyClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), userRead, userRead).setSocketTimeout(60000).build(); + getFindingsResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + getFindingsBody = entityAsMap(getFindingsResponse); + Assert.assertEquals(1, getFindingsBody.get("total_findings")); + userReadOnlyClient.close(); + + + // update user with no backend roles and try again + createUser(userRead, userRead, EMPTY_ARRAY); + userReadOnlyClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), userRead, userRead).setSocketTimeout(60000).build(); + try { + getFindingsResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + } catch (ResponseException e) + { + assertEquals("Get finding failed", RestStatus.FORBIDDEN, restStatus(e.getResponse())); + } + finally { + userReadOnlyClient.close(); + deleteUser(userRead); + } + } finally { + tryDeletingRole(TEST_HR_ROLE); + } + } +} diff --git a/src/test/java/org/opensearch/securityanalytics/mapper/MapperIT.java b/src/test/java/org/opensearch/securityanalytics/mapper/MapperIT.java deleted file mode 100644 index 213d0a923..000000000 --- a/src/test/java/org/opensearch/securityanalytics/mapper/MapperIT.java +++ /dev/null @@ -1,328 +0,0 @@ -/* -Copyright OpenSearch Contributors -SPDX-License-Identifier: Apache-2.0 - */ -package org.opensearch.securityanalytics.mapper; - -import org.apache.http.HttpStatus; -import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.opensearch.action.search.SearchResponse; -import org.opensearch.client.Request; -import org.opensearch.client.Response; -import org.opensearch.client.ResponseException; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentParser; -import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.securityanalytics.SecurityAnalyticsClientUtils; -import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; -import org.opensearch.securityanalytics.action.GetMappingsViewResponse; -import org.opensearch.test.rest.OpenSearchRestTestCase; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.List; -import java.util.Map; - -public class MapperIT extends OpenSearchRestTestCase { - - - public void testCreateMappingSuccess() throws IOException { - - String testIndexName = "my_index"; - - createSampleIndex(testIndexName); - - // Execute CreateMappingsAction to add alias mapping for index - Request request = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); - // both req params and req body are supported - request.setJsonEntity( - "{ \"index_name\":\"" + testIndexName + "\"," + - " \"rule_topic\":\"netflow\", " + - " \"partial\":true" + - "}" - ); - // request.addParameter("indexName", testIndexName); - // request.addParameter("ruleTopic", "netflow"); - Response response = client().performRequest(request); - assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - - // Verify mappings - GetMappingsResponse getMappingsResponse = SecurityAnalyticsClientUtils.executeGetMappingsRequest(testIndexName); - MappingsTraverser mappingsTraverser = new MappingsTraverser(getMappingsResponse.getMappings().iterator().next().value); - List flatProperties = mappingsTraverser.extractFlatNonAliasFields(); - assertTrue(flatProperties.contains("source.ip")); - assertTrue(flatProperties.contains("destination.ip")); - assertTrue(flatProperties.contains("source.port")); - assertTrue(flatProperties.contains("destination.port")); - // Try searching by alias field - String query = "{" + - " \"query\": {" + - " \"query_string\": {" + - " \"query\": \"source.port:4444\"" + - " }" + - " }" + - "}"; - SearchResponse searchResponse = SecurityAnalyticsClientUtils.executeSearchRequest(testIndexName, query); - assertEquals(1L, searchResponse.getHits().getTotalHits().value); - } - - public void testCreateMappingWithAliasesSuccess() throws IOException { - - String testIndexName = "my_index"; - - createSampleIndex(testIndexName); - - // Execute CreateMappingsAction to add alias mapping for index - Request request = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); - // both req params and req body are supported - request.setJsonEntity( - "{\n" + - " \"index_name\": \"my_index\",\n" + - " \"rule_topic\":\"netflow\", " + - " \"partial\":true," + - " \"alias_mappings\": {\n" + - " \"properties\": {\n" + - " \"source.ip\": {\n" + - " \"type\": \"alias\",\n" + - " \"path\": \"netflow.source_ipv4_address\"\n" + - " },\n" + - " \"source.port\": {\n" + - " \"type\": \"alias\",\n" + - " \"path\": \"netflow.source_transport_port\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}" - ); - // request.addParameter("indexName", testIndexName); - // request.addParameter("ruleTopic", "netflow"); - Response response = client().performRequest(request); - assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - - // Verify mappings - GetMappingsResponse getMappingsResponse = SecurityAnalyticsClientUtils.executeGetMappingsRequest(testIndexName); - MappingsTraverser mappingsTraverser = new MappingsTraverser(getMappingsResponse.getMappings().iterator().next().value); - List flatProperties = mappingsTraverser.extractFlatNonAliasFields(); - assertTrue(flatProperties.contains("source.ip")); - assertTrue(flatProperties.contains("source.port")); - // Try searching by alias field - String query = "{" + - " \"query\": {" + - " \"query_string\": {" + - " \"query\": \"source.port:4444\"" + - " }" + - " }" + - "}"; - SearchResponse searchResponse = SecurityAnalyticsClientUtils.executeSearchRequest(testIndexName, query); - assertEquals(1L, searchResponse.getHits().getTotalHits().value); - } - - public void testUpdateAndGetMappingSuccess() throws IOException { - - String testIndexName = "my_index"; - - createSampleIndex(testIndexName); - - // Execute UpdateMappingsAction to add alias mapping for index - Request updateRequest = new Request("PUT", SecurityAnalyticsPlugin.MAPPER_BASE_URI); - // both req params and req body are supported - updateRequest.setJsonEntity( - "{ \"index_name\":\"" + testIndexName + "\"," + - " \"field\":\"netflow.source_transport_port\","+ - " \"alias\":\"source.port\" }" - ); - // request.addParameter("indexName", testIndexName); - // request.addParameter("ruleTopic", "netflow"); - Response response = client().performRequest(updateRequest); - assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - - // Execute GetIndexMappingsAction and verify mappings - Request getRequest = new Request("GET", SecurityAnalyticsPlugin.MAPPER_BASE_URI); - getRequest.addParameter("index_name", testIndexName); - response = client().performRequest(getRequest); - XContentParser parser = createParser(JsonXContent.jsonXContent, new String(response.getEntity().getContent().readAllBytes(), StandardCharsets.UTF_8)); - assertTrue( - (((Map)((Map)((Map)((Map)((Map)parser.map() - .get(testIndexName)) - .get("mappings")) - .get("properties")) - .get("source")) - .get("properties")) - .containsKey("port")) - ); - // Try searching by alias field - String query = "{" + - " \"query\": {" + - " \"query_string\": {" + - " \"query\": \"source.port:4444\"" + - " }" + - " }" + - "}"; - SearchResponse searchResponse = SecurityAnalyticsClientUtils.executeSearchRequest(testIndexName, query); - assertEquals(1L, searchResponse.getHits().getTotalHits().value); - } - - public void testExistingMappingsAreUntouched() throws IOException { - String testIndexName = "existing_mappings_ok"; - - createSampleIndex(testIndexName); - - // Execute CreateMappingsAction to add alias mapping for index - Request request = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); - // both req params and req body are supported - request.setJsonEntity( - "{ \"index_name\":\"" + testIndexName + "\"," + - " \"rule_topic\":\"netflow\"," + - " \"partial\":true }" - ); - Response response = client().performRequest(request); - assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - - // Verify mappings - GetMappingsResponse getMappingsResponse = SecurityAnalyticsClientUtils.executeGetMappingsRequest(testIndexName); - Map properties = - (Map) getMappingsResponse.getMappings().get(testIndexName) - .getSourceAsMap().get("properties"); - // Verify that there is still mapping for integer field "plain1" - assertTrue(((Map)properties.get("plain1")).get("type").equals("integer")); - } - - public void testCreateIndexMappingsIndexMappingsEmpty() throws IOException { - - String testIndexName = "my_index_alias_fail_1"; - - createIndex(testIndexName, Settings.EMPTY); - - // Execute UpdateMappingsAction to add alias mapping for index - Request request = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); - // both req params and req body are supported - request.setJsonEntity( - "{ \"index_name\":\"" + testIndexName + "\"," + - " \"rule_topic\":\"netflow\"," + - " \"partial\":true }" - ); - try { - client().performRequest(request); - } catch (ResponseException e) { - assertTrue(e.getMessage().contains("Index mappings are empty")); - } - } - - public void testIndexNotExists() { - - String indexName = java.util.UUID.randomUUID().toString(); - - Request request = new Request("PUT", SecurityAnalyticsPlugin.MAPPER_BASE_URI); - request.addParameter("index_name", indexName); - request.addParameter("field", "field1"); - request.addParameter("alias", "alias123"); - try { - client().performRequest(request); - } catch (Exception e) { - assertTrue(e.getMessage().contains("Could not find index [" + indexName + "]")); - } - } - - public void testGetMappingsViewSuccess() throws IOException { - - String testIndexName = "get_mappings_view_index"; - - createSampleIndex(testIndexName); - - // Execute CreateMappingsAction to add alias mapping for index - Request request = new Request("GET", SecurityAnalyticsPlugin.MAPPINGS_VIEW_BASE_URI); - // both req params and req body are supported - request.addParameter("index_name", testIndexName); - request.addParameter("rule_topic", "netflow"); - Response response = client().performRequest(request); - assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - Map respMap = responseAsMap(response); - // Verify alias mappings - Map props = (Map) respMap.get("properties"); - assertEquals(4, props.size()); - assertTrue(props.containsKey("source.ip")); - assertTrue(props.containsKey("destination.ip")); - assertTrue(props.containsKey("source.port")); - assertTrue(props.containsKey("destination.port")); - // Verify unmapped index fields - List unmappedIndexFields = (List) respMap.get("unmapped_index_fields"); - assertEquals(6, unmappedIndexFields.size()); - // Verify unmapped field aliases - List unmappedFieldAliases = (List) respMap.get("unmapped_field_aliases"); - assertEquals(2, unmappedFieldAliases.size()); - } - - private void createSampleIndex(String indexName) throws IOException { - String indexMapping = - " \"properties\": {" + - " \"netflow.source_ipv4_address\": {" + - " \"type\": \"ip\"" + - " }," + - " \"netflow.destination_transport_port\": {" + - " \"type\": \"integer\"" + - " }," + - " \"netflow.destination_ipv4_address\": {" + - " \"type\": \"ip\"" + - " }," + - " \"netflow.source_transport_port\": {" + - " \"type\": \"integer\"" + - " }," + - " \"netflow.event.stop\": {" + - " \"type\": \"integer\"" + - " }," + - " \"dns.event.stop\": {" + - " \"type\": \"integer\"" + - " }," + - " \"ipx.event.stop\": {" + - " \"type\": \"integer\"" + - " }," + - " \"plain1\": {" + - " \"type\": \"integer\"" + - " }," + - " \"user\":{" + - " \"type\":\"nested\"," + - " \"properties\":{" + - " \"first\":{" + - " \"type\":\"text\"," + - " \"fields\":{" + - " \"keyword\":{" + - " \"type\":\"keyword\"," + - " \"ignore_above\":256" + - "}" + - "}" + - "}," + - " \"last\":{" + - "\"type\":\"text\"," + - "\"fields\":{" + - " \"keyword\":{" + - " \"type\":\"keyword\"," + - " \"ignore_above\":256" + - "}" + - "}" + - "}" + - "}" + - "}" + - " }"; - - createIndex(indexName, Settings.EMPTY, indexMapping); - - // Insert sample doc - String sampleDoc = "{" + - " \"netflow.source_ipv4_address\":\"10.50.221.10\"," + - " \"netflow.destination_transport_port\":1234," + - " \"netflow.destination_ipv4_address\":\"10.53.111.14\"," + - " \"netflow.source_transport_port\":4444" + - "}"; - - // Index doc - Request indexRequest = new Request("POST", indexName + "/_doc?refresh=wait_for"); - indexRequest.setJsonEntity(sampleDoc); - Response response = client().performRequest(indexRequest); - assertEquals(HttpStatus.SC_CREATED, response.getStatusLine().getStatusCode()); - // Refresh everything - response = client().performRequest(new Request("POST", "_refresh")); - assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - } - -} diff --git a/src/test/java/org/opensearch/securityanalytics/mapper/MapperRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/mapper/MapperRestApiIT.java new file mode 100644 index 000000000..a64554000 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/mapper/MapperRestApiIT.java @@ -0,0 +1,1650 @@ +/* +Copyright OpenSearch Contributors +SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.mapper; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; +import org.apache.http.HttpStatus; +import org.apache.http.entity.StringEntity; +import org.apache.http.message.BasicHeader; +import org.junit.Assert; +import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.client.Request; +import org.opensearch.client.Response; +import org.opensearch.client.ResponseException; +import org.opensearch.common.Strings; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.search.SearchHit; +import org.opensearch.securityanalytics.SecurityAnalyticsClientUtils; +import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; +import org.opensearch.securityanalytics.SecurityAnalyticsRestTestCase; +import org.opensearch.securityanalytics.TestHelpers; +import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.model.DetectorInput; +import org.opensearch.securityanalytics.model.DetectorRule; +import org.opensearch.test.OpenSearchTestCase; + + +import static org.opensearch.securityanalytics.SecurityAnalyticsPlugin.MAPPER_BASE_URI; +import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputs; + +public class MapperRestApiIT extends SecurityAnalyticsRestTestCase { + + + public void testGetMappingSuccess() throws IOException { + String testIndexName1 = "my_index_1"; + String testIndexName2 = "my_index_2"; + String testIndexPattern = "my_index*"; + + createSampleIndex(testIndexName1); + createSampleIndex(testIndexName2); + + createMappingsAPI(testIndexName2, "netflow"); + + Request request = new Request("GET", MAPPER_BASE_URI + "?index_name=" + testIndexPattern); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + Map respMap = (Map) responseAsMap(response); + // Assert that indexName returned is one passed by user + assertTrue(respMap.containsKey(testIndexPattern)); + } + + // Tests the case when the mappings map is empty + public void testGetMappings_emptyIndex_Success() throws IOException { + String testIndexName1 = "my_index_1"; + String testIndexName2 = "my_index_2"; + String testIndexPattern = "my_index*"; + + createSampleIndex(testIndexName1); + createSampleIndex(testIndexName2); + + Request request = new Request("GET", MAPPER_BASE_URI + "?index_name=" + testIndexPattern); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + Map respMap = (Map) responseAsMap(response); + Map props = (Map)((Map) respMap.get(testIndexPattern)).get("mappings"); + + // Assert that indexName returned is one passed by user + assertTrue(respMap.containsKey(testIndexPattern)); + //Assert that mappings map is also present in the output + assertTrue(props.containsKey("properties")); + } + + public void testGetMappingSuccess_1() throws IOException { + String testIndexName1 = "my_index_1"; + String testIndexPattern = "my_index*"; + + createIndex(testIndexName1, Settings.EMPTY); + + String sampleDoc = "{\n" + + " \"lvl1field\": 12345,\n" + + " \"source1.ip\": \"12345\",\n" + + " \"source1.port\": 55,\n" + + " \"some.very.long.field.name\": \"test\"\n" + + "}"; + + indexDoc(testIndexName1, "1", sampleDoc); + // puts mappings with timestamp alias + String createMappingsRequest = "{\"index_name\":\"my_index*\",\"rule_topic\":\"windows\",\"partial\":true,\"alias_mappings\":{\"properties\":{\"timestamp\":{\"type\":\"alias\",\"path\":\"lvl1field\"},\"winlog-computer_name\":{\"type\":\"alias\",\"path\":\"source1.port\"},\"winlog-event_data-AuthenticationPackageName\":{\"type\":\"alias\",\"path\":\"source1.ip\"},\"winlog-event_data-Company\":{\"type\":\"alias\",\"path\":\"some.very.long.field.name\"}}}}"; + + Request request = new Request("POST", MAPPER_BASE_URI); + // both req params and req body are supported + request.setJsonEntity(createMappingsRequest); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + request = new Request("GET", MAPPER_BASE_URI + "?index_name=" + testIndexPattern); + response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + Map respMap = (Map) responseAsMap(response); + Map props = (Map)((Map) respMap.get(testIndexPattern)).get("mappings"); + props = (Map) props.get("properties"); + assertEquals(4, props.size()); + } + + public void testCreateMappingSuccess() throws IOException { + + String testIndexName = "my_index"; + + createSampleIndex(testIndexName); + + // Execute CreateMappingsAction to add alias mapping for index + Request request = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + request.setJsonEntity( + "{ \"index_name\":\"" + testIndexName + "\"," + + " \"rule_topic\":\"netflow\", " + + " \"partial\":true" + + "}" + ); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + // Verify mappings + GetMappingsResponse getMappingsResponse = SecurityAnalyticsClientUtils.executeGetMappingsRequest(testIndexName); + MappingsTraverser mappingsTraverser = new MappingsTraverser(getMappingsResponse.getMappings().iterator().next().value); + // After applying netflow aliases, our index will have 4 alias mappings + List flatProperties = mappingsTraverser.extractFlatNonAliasFields(); + assertFalse(flatProperties.contains("source.ip")); + assertFalse(flatProperties.contains("destination.ip")); + assertFalse(flatProperties.contains("source.port")); + assertFalse(flatProperties.contains("destination.port")); + // Try searching by alias field + String query = "{" + + " \"query\": {" + + " \"query_string\": {" + + " \"query\": \"source.port:4444\"" + + " }" + + " }" + + "}"; + SearchResponse searchResponse = SecurityAnalyticsClientUtils.executeSearchRequest(testIndexName, query); + assertEquals(1L, searchResponse.getHits().getTotalHits().value); + } + + public void testCreateMappingWithAliasesSuccess() throws IOException { + + String testIndexName = "my_index"; + + createSampleIndex(testIndexName); + + // Execute CreateMappingsAction to add alias mapping for index + Request request = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + request.setJsonEntity( + "{\n" + + " \"index_name\": \"my_index\",\n" + + " \"rule_topic\":\"netflow\", " + + " \"partial\":true," + + " \"alias_mappings\": {\n" + + " \"properties\": {\n" + + " \"source.ip\": {\n" + + " \"type\": \"alias\",\n" + + " \"path\": \"netflow.source_ipv4_address\"\n" + + " },\n" + + " \"source.port\": {\n" + + " \"type\": \"alias\",\n" + + " \"path\": \"netflow.source_transport_port\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}" + ); + // request.addParameter("indexName", testIndexName); + // request.addParameter("ruleTopic", "netflow"); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + // Verify mappings + GetMappingsResponse getMappingsResponse = SecurityAnalyticsClientUtils.executeGetMappingsRequest(testIndexName); + MappingsTraverser mappingsTraverser = new MappingsTraverser(getMappingsResponse.getMappings().iterator().next().value); + List flatProperties = mappingsTraverser.extractFlatNonAliasFields(); + assertFalse(flatProperties.contains("source.ip")); + assertFalse(flatProperties.contains("source.port")); + // Try searching by alias field + String query = "{" + + " \"query\": {" + + " \"query_string\": {" + + " \"query\": \"source.port:4444\"" + + " }" + + " }" + + "}"; + SearchResponse searchResponse = SecurityAnalyticsClientUtils.executeSearchRequest(testIndexName, query); + assertEquals(1L, searchResponse.getHits().getTotalHits().value); + } + + public void testUpdateAndGetMappingSuccess() throws IOException { + + String testIndexName = "my_index"; + + createSampleIndex(testIndexName); + + // Execute UpdateMappingsAction to add alias mapping for index + Request updateRequest = new Request("PUT", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + updateRequest.setJsonEntity( + "{ \"index_name\":\"" + testIndexName + "\"," + + " \"field\":\"netflow.source_transport_port\","+ + " \"alias\":\"source.port\" }" + ); + // request.addParameter("indexName", testIndexName); + // request.addParameter("ruleTopic", "netflow"); + Response response = client().performRequest(updateRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + // Execute GetIndexMappingsAction and verify mappings + Request getRequest = new Request("GET", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + getRequest.addParameter("index_name", testIndexName); + response = client().performRequest(getRequest); + XContentParser parser = createParser(JsonXContent.jsonXContent, new String(response.getEntity().getContent().readAllBytes(), StandardCharsets.UTF_8)); + assertTrue( + (((Map)((Map)((Map)((Map)((Map)parser.map() + .get(testIndexName)) + .get("mappings")) + .get("properties")) + .get("source")) + .get("properties")) + .containsKey("port")) + ); + // Try searching by alias field + String query = "{" + + " \"query\": {" + + " \"query_string\": {" + + " \"query\": \"source.port:4444\"" + + " }" + + " }" + + "}"; + SearchResponse searchResponse = SecurityAnalyticsClientUtils.executeSearchRequest(testIndexName, query); + assertEquals(1L, searchResponse.getHits().getTotalHits().value); + } + + // Tests the case when alias mappings are not present on the index + public void testUpdateAndGetMapping_notFound_Success() throws IOException { + + String testIndexName = "my_index"; + + createSampleIndex(testIndexName); + + // Execute UpdateMappingsAction to add other settings except alias mapping for index + Request updateRequest = new Request("PUT", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + updateRequest.setJsonEntity( + "{ \"index_name\":\"" + testIndexName + "\"," + + " \"field\":\"netflow.source_transport_port\","+ + " \"alias\":\"\\u0000\" }" + ); + + Response response = client().performRequest(updateRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + // Execute GetIndexMappingsAction and verify mappings + Request getRequest = new Request("GET", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + getRequest.addParameter("index_name", testIndexName); + response = client().performRequest(getRequest); + XContentParser parser = createParser(JsonXContent.jsonXContent, new String(response.getEntity().getContent().readAllBytes(), StandardCharsets.UTF_8)); + assertTrue( + (((Map)((Map)parser.map() + .get(testIndexName)) + .get("mappings")) + .containsKey("properties"))); + } + + public void testExistingMappingsAreUntouched() throws IOException { + String testIndexName = "existing_mappings_ok"; + + createSampleIndex(testIndexName); + + // Execute CreateMappingsAction to add alias mapping for index + Request request = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + request.setJsonEntity( + "{ \"index_name\":\"" + testIndexName + "\"," + + " \"rule_topic\":\"netflow\"," + + " \"partial\":true }" + ); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + // Verify mappings + GetMappingsResponse getMappingsResponse = SecurityAnalyticsClientUtils.executeGetMappingsRequest(testIndexName); + Map properties = + (Map) getMappingsResponse.getMappings().get(testIndexName) + .getSourceAsMap().get("properties"); + // Verify that there is still mapping for integer field "plain1" + assertTrue(((Map)properties.get("plain1")).get("type").equals("integer")); + } + + public void testCreateIndexMappingsIndexMappingsEmpty() throws IOException { + + String testIndexName = "my_index_alias_fail_1"; + + createIndex(testIndexName, Settings.EMPTY); + + // Execute UpdateMappingsAction to add alias mapping for index + Request request = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + request.setJsonEntity( + "{ \"index_name\":\"" + testIndexName + "\"," + + " \"rule_topic\":\"netflow\"," + + " \"partial\":true }" + ); + try { + client().performRequest(request); + } catch (ResponseException e) { + assertTrue(e.getMessage().contains("Mappings for index [my_index_alias_fail_1] are empty")); + } + } + + public void testIndexNotExists() { + + String indexName = java.util.UUID.randomUUID().toString(); + + Request request = new Request("PUT", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + request.addParameter("index_name", indexName); + request.addParameter("field", "field1"); + request.addParameter("alias", "alias123"); + try { + client().performRequest(request); + } catch (Exception e) { + assertTrue(e.getMessage().contains("Could not find index [" + indexName + "]")); + } + } + + public void testGetMappingsViewSuccess() throws IOException { + + String testIndexName = "get_mappings_view_index"; + + createSampleIndex(testIndexName); + + // Execute GetMappingsViewAction to add alias mapping for index + Request request = new Request("GET", SecurityAnalyticsPlugin.MAPPINGS_VIEW_BASE_URI); + // both req params and req body are supported + request.addParameter("index_name", testIndexName); + request.addParameter("rule_topic", "netflow"); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + Map respMap = responseAsMap(response); + // Verify alias mappings + Map props = (Map) respMap.get("properties"); + assertEquals(4, props.size()); + assertTrue(props.containsKey("source.ip")); + assertTrue(props.containsKey("destination.ip")); + assertTrue(props.containsKey("source.port")); + assertTrue(props.containsKey("destination.port")); + // Verify unmapped index fields + List unmappedIndexFields = (List) respMap.get("unmapped_index_fields"); + assertEquals(6, unmappedIndexFields.size()); + // Verify unmapped field aliases + List unmappedFieldAliases = (List) respMap.get("unmapped_field_aliases"); + assertEquals(3, unmappedFieldAliases.size()); + } + + public void testCreateMappings_withDatastream_success() throws IOException { + String datastream = "test_datastream"; + + String datastreamMappings = "\"properties\": {" + + " \"@timestamp\":{ \"type\": \"date\" }," + + " \"netflow.destination_transport_port\":{ \"type\": \"long\" }," + + " \"netflow.destination_ipv4_address\":{ \"type\": \"ip\" }" + + "}"; + + createSampleDatastream(datastream, datastreamMappings); + + // Execute CreateMappingsAction to add alias mapping for index + createMappingsAPI(datastream, "netflow"); + + // Verify mappings + Map props = getIndexMappingsAPIFlat(datastream); + assertEquals(5, props.size()); + assertTrue(props.containsKey("@timestamp")); + assertTrue(props.containsKey("netflow.destination_transport_port")); + assertTrue(props.containsKey("netflow.destination_ipv4_address")); + assertTrue(props.containsKey("destination.ip")); + assertTrue(props.containsKey("destination.port")); + + // Verify that index template applied mappings + Response response = makeRequest(client(), "POST", datastream + "/_rollover", Collections.emptyMap(), null); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + // Insert doc to index to add additional fields to mapping + String sampleDoc = "{" + + " \"@timestamp\":\"2023-01-06T00:05:00\"," + + " \"netflow.source_ipv4_address\":\"10.50.221.10\"," + + " \"netflow.source_transport_port\":4444" + + "}"; + + indexDoc(datastream, "2", sampleDoc); + + // Execute CreateMappingsAction to add alias mapping for index + createMappingsAPI(datastream, "netflow"); + + String writeIndex = getDatastreamWriteIndex(datastream); + + // Verify mappings + props = getIndexMappingsAPIFlat(writeIndex); + assertEquals(9, props.size()); + assertTrue(props.containsKey("@timestamp")); + assertTrue(props.containsKey("netflow.source_ipv4_address")); + assertTrue(props.containsKey("netflow.source_transport_port")); + assertTrue(props.containsKey("netflow.destination_transport_port")); + assertTrue(props.containsKey("netflow.destination_ipv4_address")); + assertTrue(props.containsKey("destination.ip")); + assertTrue(props.containsKey("destination.port")); + assertTrue(props.containsKey("source.ip")); + assertTrue(props.containsKey("source.port")); + + // Get applied mappings + props = getIndexMappingsSAFlat(datastream); + assertTrue(props.containsKey("destination.ip")); + assertTrue(props.containsKey("destination.port")); + assertTrue(props.containsKey("source.ip")); + assertTrue(props.containsKey("source.port")); + + deleteDatastreamAPI(datastream); + } + + public void testCreateMappings_withDatastream_withTemplateField_success() throws IOException { + String datastream = "test_datastream"; + + String datastreamMappings = "\"properties\": {" + + " \"@timestamp\":{ \"type\": \"date\" }," + + " \"netflow.destination_transport_port\":{ \"type\": \"long\" }," + + " \"netflow.destination_ipv4_address\":{ \"type\": \"ip\" }" + + "}"; + + createSampleDatastream(datastream, datastreamMappings, false); + + // Execute CreateMappingsAction to add alias mapping for index + createMappingsAPI(datastream, "netflow"); + + // Verify mappings + Map props = getIndexMappingsAPIFlat(datastream); + assertEquals(5, props.size()); + assertTrue(props.containsKey("@timestamp")); + assertTrue(props.containsKey("netflow.destination_transport_port")); + assertTrue(props.containsKey("netflow.destination_ipv4_address")); + assertTrue(props.containsKey("destination.ip")); + assertTrue(props.containsKey("destination.port")); + + // Verify that index template applied mappings + Response response = makeRequest(client(), "POST", datastream + "/_rollover", Collections.emptyMap(), null); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + // Insert doc to index to add additional fields to mapping + String sampleDoc = "{" + + " \"@timestamp\":\"2023-01-06T00:05:00\"," + + " \"netflow.source_ipv4_address\":\"10.50.221.10\"," + + " \"netflow.source_transport_port\":4444" + + "}"; + + indexDoc(datastream, "2", sampleDoc); + + // Execute CreateMappingsAction to add alias mapping for index + createMappingsAPI(datastream, "netflow"); + + String writeIndex = getDatastreamWriteIndex(datastream); + + // Verify mappings + props = getIndexMappingsAPIFlat(writeIndex); + assertEquals(9, props.size()); + assertTrue(props.containsKey("@timestamp")); + assertTrue(props.containsKey("netflow.source_ipv4_address")); + assertTrue(props.containsKey("netflow.source_transport_port")); + assertTrue(props.containsKey("netflow.destination_transport_port")); + assertTrue(props.containsKey("netflow.destination_ipv4_address")); + assertTrue(props.containsKey("destination.ip")); + assertTrue(props.containsKey("destination.port")); + assertTrue(props.containsKey("source.ip")); + assertTrue(props.containsKey("source.port")); + + // Get applied mappings + props = getIndexMappingsSAFlat(datastream); + assertTrue(props.containsKey("destination.ip")); + assertTrue(props.containsKey("destination.port")); + assertTrue(props.containsKey("source.ip")); + assertTrue(props.containsKey("source.port")); + + deleteDatastreamAPI(datastream); + } + + public void testCreateMappings_withIndexPattern_existing_indexTemplate_update_success() throws IOException { + String indexName1 = "test_index_1"; + String indexName2 = "test_index_2"; + String indexName3 = "test_index_3"; + + String indexPattern = "test_index*"; + + String componentTemplateMappings = "\"properties\": {" + + " \"netflow.destination_transport_port\":{ \"type\": \"long\" }," + + " \"netflow.destination_ipv4_address\":{ \"type\": \"ip\" }" + + "}"; + + // Setup index_template + createComponentTemplateWithMappings( + IndexTemplateUtils.computeComponentTemplateName(indexPattern), + componentTemplateMappings + ); + + createComposableIndexTemplate( + IndexTemplateUtils.computeIndexTemplateName(indexPattern), + List.of(indexPattern), + IndexTemplateUtils.computeComponentTemplateName(indexPattern), + null, + false + ); + + createIndex(indexName1, Settings.EMPTY, null); + + // Execute CreateMappingsAction to apply alias mappings - index template should be updated + createMappingsAPI(indexPattern, "netflow"); + + // Create new index to verify that index template is updated + createIndex(indexName2, Settings.EMPTY, null); + + // Verify that template applied mappings + Map props = getIndexMappingsAPIFlat(indexName2); + assertEquals(4, props.size()); + assertTrue(props.containsKey("netflow.destination_transport_port")); + assertTrue(props.containsKey("netflow.destination_ipv4_address")); + assertTrue(props.containsKey("destination.ip")); + assertTrue(props.containsKey("destination.port")); + + // Verify our GetIndexMappings -- applied mappings + props = getIndexMappingsSAFlat(indexPattern); + assertEquals(2, props.size()); + assertTrue(props.containsKey("destination.ip")); + assertTrue(props.containsKey("destination.port")); + + + // Insert doc to index to add additional fields to mapping + String sampleDoc = "{" + + " \"netflow.source_ipv4_address\":\"10.50.221.10\"," + + " \"netflow.source_transport_port\":4444" + + "}"; + + indexDoc(indexName2, "1", sampleDoc); + + // Call CreateMappings API and expect index template to be updated with 2 additional aliases + createMappingsAPI(indexPattern, "netflow"); + + // Create new index to verify that index template was updated correctly + createIndex(indexName3, Settings.EMPTY, null); + + // Verify mappings + props = getIndexMappingsAPIFlat(indexName3); + assertEquals(8, props.size()); + assertTrue(props.containsKey("source.ip")); + assertTrue(props.containsKey("destination.ip")); + assertTrue(props.containsKey("source.port")); + assertTrue(props.containsKey("destination.port")); + assertTrue(props.containsKey("netflow.source_transport_port")); + assertTrue(props.containsKey("netflow.source_ipv4_address")); + assertTrue(props.containsKey("netflow.destination_transport_port")); + assertTrue(props.containsKey("netflow.destination_ipv4_address")); + + // Verify our GetIndexMappings -- applied mappings + props = getIndexMappingsSAFlat(indexPattern); + assertEquals(4, props.size()); + assertTrue(props.containsKey("source.ip")); + assertTrue(props.containsKey("destination.ip")); + assertTrue(props.containsKey("source.port")); + assertTrue(props.containsKey("destination.port")); + } + + public void testCreateMappings_withIndexPattern_differentMappings_indexTemplateCleanup_success() throws IOException, InterruptedException { + String indexName1 = "test_index_1"; + String indexName2 = "test_index_2"; + String indexPattern = "test_index*"; + + createIndex(indexName1, Settings.EMPTY, null); + createIndex(indexName2, Settings.EMPTY, null); + + client().performRequest(new Request("POST", "_refresh")); + + // Insert sample docs + String sampleDoc1 = "{" + + " \"netflow.source_ipv4_address\":\"10.50.221.10\"," + + " \"netflow.destination_transport_port\":1234," + + " \"netflow.source_transport_port\":4444" + + "}"; + String sampleDoc2 = "{" + + " \"netflow.destination_transport_port\":1234," + + " \"netflow.destination_ipv4_address\":\"10.53.111.14\"" + + "}"; + indexDoc(indexName1, "1", sampleDoc1); + indexDoc(indexName2, "1", sampleDoc2); + + client().performRequest(new Request("POST", "_refresh")); + + // Execute CreateMappingsAction to add alias mapping for index + createMappingsAPI(indexPattern, "netflow"); + + DetectorInput input = new DetectorInput("", List.of(indexPattern), List.of(), + getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList())); + String detectorId = createDetector(TestHelpers.randomDetectorWithInputs(List.of((input)))); + + refreshAllIndices(); + + List componentTemplates = getAllComponentTemplates(); + assertEquals(1, componentTemplates.size()); + List composableIndexTemplates = getAllComposableIndexTemplates(); + assertEquals(2, composableIndexTemplates.size()); + + deleteDetector(detectorId); + + // Wait for clusterState update to be published/applied + OpenSearchTestCase.waitUntil(() -> { + try { + List ct = getAllComponentTemplates(); + if (ct.size() == 0) { + return true; + } else { + return false; + } + } catch (IOException e) { + + } + return false; + }); + OpenSearchTestCase.waitUntil(() -> { + try { + List cct = getAllComposableIndexTemplates(); + if (cct.size() == 1) { + return true; + } else { + return false; + } + } catch (IOException e) { + + } + return false; + }); + + componentTemplates = getAllComponentTemplates(); + assertEquals(0, componentTemplates.size()); + composableIndexTemplates = getAllComposableIndexTemplates(); + assertEquals(1, composableIndexTemplates.size()); + } + + public void testCreateMappings_withIndexPattern_indexTemplate_createAndUpdate_success() throws IOException { + String indexName1 = "test_index_1"; + String indexName2 = "test_index_2"; + String indexName3 = "test_index_3"; + String indexName4 = "test_index_4"; + + String indexPattern = "test_index*"; + + createIndex(indexName1, Settings.EMPTY, null); + createIndex(indexName2, Settings.EMPTY, null); + + client().performRequest(new Request("POST", "_refresh")); + + // Insert sample doc + String sampleDoc1 = "{" + + " \"netflow.destination_transport_port\":1234," + + " \"netflow.destination_ipv4_address\":\"10.53.111.14\"" + + "}"; + + indexDoc(indexName1, "1", sampleDoc1); + indexDoc(indexName2, "1", sampleDoc1); + + client().performRequest(new Request("POST", "_refresh")); + + // Execute CreateMappingsAction to add alias mapping for index + createMappingsAPI(indexPattern, "netflow"); + + // Verify that index template is up + createIndex(indexName3, Settings.EMPTY, null); + + // Execute CreateMappingsAction to add alias mapping for index + Request request = new Request("GET", indexName3 + "/_mapping"); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + Map respMap = (Map) responseAsMap(response).get(indexName3); + + MappingsTraverser mappingsTraverser = new MappingsTraverser((Map) respMap.get("mappings"), Set.of()); + Map flatMappings = mappingsTraverser.traverseAndCopyAsFlat(); + // Verify mappings + Map props = (Map) flatMappings.get("properties"); + assertEquals(4, props.size()); + assertTrue(props.containsKey("destination.ip")); + assertTrue(props.containsKey("destination.port")); + assertTrue(props.containsKey("netflow.destination_transport_port")); + assertTrue(props.containsKey("netflow.destination_ipv4_address")); + + String sampleDoc2 = "{" + + " \"netflow.source_ipv4_address\":\"10.50.221.10\"," + + " \"netflow.destination_transport_port\":1234," + + " \"netflow.destination_ipv4_address\":\"10.53.111.14\"," + + " \"netflow.source_transport_port\":4444" + + "}"; + + indexDoc(indexName3, "1", sampleDoc2); + + // Execute CreateMappingsAction to add alias mapping for index + createMappingsAPI(indexPattern, "netflow"); + + // Verify that index template is updated + createIndex(indexName4, Settings.EMPTY, null); + + // Execute CreateMappingsAction to add alias mapping for index + request = new Request("GET", indexName4 + "/_mapping"); + response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + respMap = (Map) responseAsMap(response).get(indexName4); + + mappingsTraverser = new MappingsTraverser((Map) respMap.get("mappings"), Set.of()); + flatMappings = mappingsTraverser.traverseAndCopyAsFlat(); + // Verify mappings + props = (Map) flatMappings.get("properties"); + assertEquals(8, props.size()); + assertTrue(props.containsKey("source.ip")); + assertTrue(props.containsKey("destination.ip")); + assertTrue(props.containsKey("source.port")); + assertTrue(props.containsKey("destination.port")); + assertTrue(props.containsKey("netflow.source_transport_port")); + assertTrue(props.containsKey("netflow.source_ipv4_address")); + assertTrue(props.containsKey("netflow.destination_transport_port")); + assertTrue(props.containsKey("netflow.destination_ipv4_address")); + + // Verify applied mappings + props = getIndexMappingsSAFlat(indexName4); + assertEquals(4, props.size()); + assertTrue(props.containsKey("source.ip")); + assertTrue(props.containsKey("destination.ip")); + assertTrue(props.containsKey("source.port")); + assertTrue(props.containsKey("destination.port")); + } + + public void testCreateMappings_withIndexPattern_oneNoMappings_failure() throws IOException { + String indexName1 = "test_index_1"; + String indexName2 = "test_index_2"; + String indexPattern = "test_index*"; + + createIndex(indexName1, Settings.EMPTY, null); + createIndex(indexName2, Settings.EMPTY, null); + + client().performRequest(new Request("POST", "_refresh")); + + // Insert sample docs + String sampleDoc1 = "{" + + " \"netflow.source_ipv4_address\":\"10.50.221.10\"," + + " \"netflow.destination_transport_port\":1234," + + " \"netflow.source_transport_port\":4444" + + "}"; + indexDoc(indexName1, "1", sampleDoc1); + + client().performRequest(new Request("POST", "_refresh")); + + // Execute CreateMappingsAction to add alias mapping for index + try { + createMappingsAPI(indexPattern, "netflow"); + fail("expected 500 failure!"); + } catch (ResponseException e) { + assertEquals(HttpStatus.SC_INTERNAL_SERVER_ERROR, e.getResponse().getStatusLine().getStatusCode()); + } + + } + + public void testGetMappingsView_index_pattern_two_indices_Success() throws IOException { + + String testIndexName1 = "get_mappings_view_index111"; + String testIndexName2 = "get_mappings_view_index122"; + String testIndexName3 = "get_mappings_view_index"; + + String indexPattern = "get_mappings_view_index1*"; + String indexPattern2 = "get_mappings_view_index*"; + + createSampleIndex(testIndexName1); + createSampleIndex(testIndexName2); + indexDoc(testIndexName2, "987654", "{ \"extra_field\": 12345 }"); + + // Execute CreateMappingsAction to add alias mapping for index + Request request = new Request("GET", SecurityAnalyticsPlugin.MAPPINGS_VIEW_BASE_URI); + // both req params and req body are supported + request.addParameter("index_name", indexPattern); + request.addParameter("rule_topic", "netflow"); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + Map respMap = responseAsMap(response); + // Verify alias mappings + Map props = (Map) respMap.get("properties"); + assertEquals(4, props.size()); + assertTrue(props.containsKey("source.ip")); + assertTrue(props.containsKey("destination.ip")); + assertTrue(props.containsKey("source.port")); + assertTrue(props.containsKey("destination.port")); + // Verify unmapped index fields + List unmappedIndexFields = (List) respMap.get("unmapped_index_fields"); + assertEquals(7, unmappedIndexFields.size()); + // Verify that we got Mappings View of concrete index testIndexName2 because it is newest of all under this alias + Optional extraField = unmappedIndexFields.stream().filter(e -> e.equals("extra_field")).findFirst(); + assertTrue(extraField.isPresent()); + // Verify unmapped field aliases + List unmappedFieldAliases = (List) respMap.get("unmapped_field_aliases"); + assertEquals(3, unmappedFieldAliases.size()); + } + + public void testGetMappingsView_alias_without_writeindex_Success() throws IOException { + + String testIndexName1 = "get_mappings_view_index11"; + String testIndexName2 = "get_mappings_view_index22"; + String indexAlias = "index_alias"; + createSampleIndex(testIndexName1, Settings.EMPTY, "\"" + indexAlias + "\":{}"); + createSampleIndex(testIndexName2, Settings.EMPTY, "\"" + indexAlias + "\":{}"); + indexDoc(testIndexName2, "987654", "{ \"extra_field\": 12345 }"); + + // Execute CreateMappingsAction to add alias mapping for index + Request request = new Request("GET", SecurityAnalyticsPlugin.MAPPINGS_VIEW_BASE_URI); + // both req params and req body are supported + request.addParameter("index_name", indexAlias); + request.addParameter("rule_topic", "netflow"); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + Map respMap = responseAsMap(response); + // Verify alias mappings + Map props = (Map) respMap.get("properties"); + assertEquals(4, props.size()); + assertTrue(props.containsKey("source.ip")); + assertTrue(props.containsKey("destination.ip")); + assertTrue(props.containsKey("source.port")); + assertTrue(props.containsKey("destination.port")); + // Verify unmapped index fields + List unmappedIndexFields = (List) respMap.get("unmapped_index_fields"); + assertEquals(7, unmappedIndexFields.size()); + // Verify that we got Mappings View of concrete index testIndexName2 because it is newest of all under this alias + Optional extraField = unmappedIndexFields.stream().filter(e -> e.equals("extra_field")).findFirst(); + assertTrue(extraField.isPresent()); + // Verify unmapped field aliases + List unmappedFieldAliases = (List) respMap.get("unmapped_field_aliases"); + assertEquals(3, unmappedFieldAliases.size()); + } + + public void testGetMappingsView_alias_with_writeindex_Success() throws IOException { + + String testIndexName1 = "get_mappings_view_index11"; + String testIndexName2 = "get_mappings_view_index22"; + String indexAlias = "index_alias"; + + createSampleIndex(testIndexName2, Settings.EMPTY, "\"" + indexAlias + "\":{}"); + createSampleIndex(testIndexName1, Settings.EMPTY, "\"" + indexAlias + "\":{ \"is_write_index\":true }"); + + // Add extra field by inserting doc to index #1 to differentiate two easier + indexDoc(testIndexName1, "987654", "{ \"extra_field\": 12345 }"); + + // Execute CreateMappingsAction to add alias mapping for index + Request request = new Request("GET", SecurityAnalyticsPlugin.MAPPINGS_VIEW_BASE_URI); + // both req params and req body are supported + request.addParameter("index_name", indexAlias); + request.addParameter("rule_topic", "netflow"); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + Map respMap = responseAsMap(response); + // Verify alias mappings + Map props = (Map) respMap.get("properties"); + assertEquals(4, props.size()); + assertTrue(props.containsKey("source.ip")); + assertTrue(props.containsKey("destination.ip")); + assertTrue(props.containsKey("source.port")); + assertTrue(props.containsKey("destination.port")); + // Verify unmapped index fields + List unmappedIndexFields = (List) respMap.get("unmapped_index_fields"); + assertEquals(7, unmappedIndexFields.size()); + // Verify that we got Mappings View of concrete index testIndexName2 because it is newest of all under this alias + Optional extraField = unmappedIndexFields.stream().filter(e -> e.equals("extra_field")).findFirst(); + assertTrue(extraField.isPresent()); + // Verify unmapped field aliases + List unmappedFieldAliases = (List) respMap.get("unmapped_field_aliases"); + assertEquals(3, unmappedFieldAliases.size()); + } + + public void testGetMappingsView_datastream_one_backing_index_Success() throws IOException { + + String datastreamName = "my_data_stream"; + createSampleDatastream(datastreamName); + // Execute GetMappingsViewAction to add alias mapping for index + Request request = new Request("GET", SecurityAnalyticsPlugin.MAPPINGS_VIEW_BASE_URI); + // both req params and req body are supported + request.addParameter("index_name", datastreamName); + request.addParameter("rule_topic", "netflow"); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + Map respMap = responseAsMap(response); + // Verify alias mappings + Map props = (Map) respMap.get("properties"); + assertEquals(4, props.size()); + assertTrue(props.containsKey("source.ip")); + assertTrue(props.containsKey("destination.ip")); + assertTrue(props.containsKey("source.port")); + assertTrue(props.containsKey("destination.port")); + // Verify unmapped index fields + List unmappedIndexFields = (List) respMap.get("unmapped_index_fields"); + assertEquals(7, unmappedIndexFields.size()); + // Verify unmapped field aliases + List unmappedFieldAliases = (List) respMap.get("unmapped_field_aliases"); + assertEquals(3, unmappedFieldAliases.size()); + + deleteDatastream(datastreamName); + } + + public void testGetMappingsView_datastream_two_backing_index_Success() throws IOException { + + String datastreamName = "my_data_stream"; + createSampleDatastream(datastreamName); + + // Modify index template to change mappings and then rollover + String indexMapping = + " \"properties\": {" + + " \"@timestamp\": {" + + " \"type\": \"date\"" + + " }," + + " \"netflow.source_ipv4_address\": {" + + " \"type\": \"ip\"" + + " }" + + "}"; + + String indexTemplateRequest = "{\n" + + " \"index_patterns\": [\"" + datastreamName + "*\"],\n" + + " \"data_stream\": { },\n" + + " \"template\": {\n" + + " \"mappings\" : {" + indexMapping + "}\n" + + " }," + + " \"priority\": 500\n" + + "}"; + + + Response response = makeRequest(client(), "PUT", "_index_template/" + datastreamName + "-template", Collections.emptyMap(), + new StringEntity(indexTemplateRequest), new BasicHeader("Content-Type", "application/json")); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + doRollover(datastreamName); + + // Execute GetMappingsViewAction to add alias mapping for index + Request request = new Request("GET", SecurityAnalyticsPlugin.MAPPINGS_VIEW_BASE_URI); + // both req params and req body are supported + request.addParameter("index_name", datastreamName); + request.addParameter("rule_topic", "netflow"); + response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + Map respMap = responseAsMap(response); + // Verify alias mappings + Map props = (Map) respMap.get("properties"); + assertEquals(1, props.size()); + assertTrue(props.containsKey("source.ip")); + // Verify unmapped index fields + + // Verify unmapped field aliases + List unmappedFieldAliases = (List) respMap.get("unmapped_field_aliases"); + assertEquals(6, unmappedFieldAliases.size()); + + deleteDatastream(datastreamName); + } + + public void testCreateMappings_withIndexPattern_success() throws IOException { + String indexName1 = "test_index_1"; + String indexName2 = "test_index_2"; + String indexPattern = "test_index*"; + + createIndex(indexName1, Settings.EMPTY, null); + createIndex(indexName2, Settings.EMPTY, null); + + client().performRequest(new Request("POST", "_refresh")); + + // Insert sample doc + String sampleDoc = "{" + + " \"netflow.source_ipv4_address\":\"10.50.221.10\"," + + " \"netflow.destination_transport_port\":1234," + + " \"netflow.destination_ipv4_address\":\"10.53.111.14\"," + + " \"netflow.source_transport_port\":4444" + + "}"; + + indexDoc(indexName1, "1", sampleDoc); + indexDoc(indexName2, "1", sampleDoc); + + client().performRequest(new Request("POST", "_refresh")); + + // Execute CreateMappingsAction to add alias mapping for index + Request request = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + request.setJsonEntity( + "{ \"index_name\":\"" + indexPattern + "\"," + + " \"rule_topic\":\"netflow\", " + + " \"partial\":true" + + "}" + ); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + } + + public void testCreateMappings_withIndexPattern_conflictingTemplates_success() throws IOException { + String indexName1 = "test_index_11"; + String indexName2 = "test_index_12"; + String indexName3 = "test_index_13"; + String indexName4 = "test_index44"; + String indexPattern1 = "test_index_1*"; + String indexPattern2 = "test_index*"; + + createIndex(indexName1, Settings.EMPTY, null); + createIndex(indexName2, Settings.EMPTY, null); + + client().performRequest(new Request("POST", "_refresh")); + + // Insert sample doc + String sampleDoc = "{" + + " \"netflow.source_ipv4_address\":\"10.50.221.10\"," + + " \"netflow.destination_transport_port\":1234" + + "}"; + + indexDoc(indexName1, "1", sampleDoc); + indexDoc(indexName2, "1", sampleDoc); + + client().performRequest(new Request("POST", "_refresh")); + + // Execute CreateMappingsAction with first index pattern + createMappingsAPI(indexPattern1, "netflow"); + + createIndex(indexName3, Settings.EMPTY, null); + + // Insert sample doc + String sampleDoc2 = "{" + + " \"netflow.source_ipv4_address\":\"10.50.221.10\"," + + " \"netflow.destination_transport_port\":1234," + + " \"netflow.destination_ipv4_address\":\"10.53.111.14\"," + + " \"netflow.source_transport_port\":4444" + + "}"; + + indexDoc(indexName3, "1", sampleDoc2); + + // Execute CreateMappingsAction with conflicting index pattern - expect template to be updated + createMappingsAPI(indexPattern2, "netflow"); + + createIndex(indexName4, Settings.EMPTY, null); + // Verify with GET _mapping + Map props = getIndexMappingsAPIFlat(indexName4); + assertEquals(8, props.size()); + // Verify with SA's GetIndexMappings + props = getIndexMappingsSAFlat(indexName4); + assertEquals(4, props.size()); + assertTrue(props.containsKey("source.ip")); + assertTrue(props.containsKey("source.port")); + assertTrue(props.containsKey("destination.ip")); + assertTrue(props.containsKey("destination.port")); + } + + public void testCreateMappings_withIndexPattern_conflictingTemplates_failure_1() throws IOException { + String indexName1 = "test_index_11"; + String indexName2 = "test_index_12"; + String indexName3 = "test_index_13"; + String indexName4 = "test_index44"; + String indexPattern1 = "test_index_1*"; + String indexPattern2 = "test_index*"; + + createIndex(indexName1, Settings.EMPTY, null); + createIndex(indexName2, Settings.EMPTY, null); + + client().performRequest(new Request("POST", "_refresh")); + + // Insert sample doc + String sampleDoc = "{" + + " \"netflow.source_ipv4_address\":\"10.50.221.10\"," + + " \"netflow.destination_transport_port\":1234" + + "}"; + + indexDoc(indexName1, "1", sampleDoc); + indexDoc(indexName2, "1", sampleDoc); + + client().performRequest(new Request("POST", "_refresh")); + + // Execute CreateMappingsAction with first index pattern + createMappingsAPI(indexPattern1, "netflow"); + + // User-create template with conflicting pattern but higher priority + createComponentTemplateWithMappings("user_component_template", "\"properties\": { \"some_field\": { \"type\": \"long\" } }"); + createComposableIndexTemplate("user_custom_template", List.of("test_index_111111*"), "user_component_template", null, false, 100); + + // Execute CreateMappingsAction and expect 2 conflicting templates and failure + try { + createMappingsAPI(indexPattern2, "netflow"); + } catch (ResponseException e) { + assertTrue(e.getMessage().contains("Found conflicting templates: [user_custom_template, .opensearch-sap-alias-mappings-index-template-test_index_1]")); + } + } + + public void testCreateMappings_withIndexPattern_conflictingTemplates_failure_2() throws IOException { + String indexName1 = "test_index_11"; + String indexName2 = "test_index_12"; + String indexName3 = "test_index_13"; + String indexName4 = "test_index44"; + String indexPattern1 = "test_index_1*"; + String indexPattern2 = "test_index*"; + + createIndex(indexName1, Settings.EMPTY, null); + createIndex(indexName2, Settings.EMPTY, null); + + client().performRequest(new Request("POST", "_refresh")); + + // Insert sample doc + String sampleDoc = "{" + + " \"netflow.source_ipv4_address\":\"10.50.221.10\"," + + " \"netflow.destination_transport_port\":1234" + + "}"; + + indexDoc(indexName1, "1", sampleDoc); + indexDoc(indexName2, "1", sampleDoc); + + client().performRequest(new Request("POST", "_refresh")); + + + // User-create template with conflicting pattern but higher priority + createComponentTemplateWithMappings("user_component_template", "\"properties\": { \"some_field\": { \"type\": \"long\" } }"); + createComposableIndexTemplate("user_custom_template", List.of("test_index_111111*"), "user_component_template", null, false, 100); + + // Execute CreateMappingsAction and expect conflict with 1 user template + try { + createMappingsAPI(indexPattern2, "netflow"); + } catch (ResponseException e) { + assertTrue(e.getMessage().contains("Found conflicting template: [user_custom_template]")); + } + } + + + public void testCreateMappings_withIndexPattern_oneNoMatches_success() throws IOException { + String indexName1 = "test_index_1"; + String indexName2 = "test_index_2"; + String indexPattern = "test_index*"; + + createIndex(indexName1, Settings.EMPTY, null); + createIndex(indexName2, Settings.EMPTY, null); + + client().performRequest(new Request("POST", "_refresh")); + + // Insert sample docs + String sampleDoc1 = "{" + + " \"netflow.source_ipv4_address\":\"10.50.221.10\"," + + " \"netflow.destination_transport_port\":1234," + + " \"netflow.source_transport_port\":4444" + + "}"; + String sampleDoc2 = "{" + + " \"netflow11.destination33_transport_port\":1234," + + " \"netflow11.destination33_ipv4_address\":\"10.53.111.14\"" + + "}"; + indexDoc(indexName1, "1", sampleDoc1); + indexDoc(indexName2, "1", sampleDoc2); + + client().performRequest(new Request("POST", "_refresh")); + + // Execute CreateMappingsAction to add alias mapping for index + Request request = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + request.setJsonEntity( + "{ \"index_name\":\"" + indexPattern + "\"," + + " \"rule_topic\":\"netflow\", " + + " \"partial\":true" + + "}" + ); + Response response = client().performRequest(request); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + } + + private void createSampleIndex(String indexName) throws IOException { + createSampleIndex(indexName, Settings.EMPTY, null); + } + + private void createSampleIndex(String indexName, Settings settings, String aliases) throws IOException { + String indexMapping = + " \"properties\": {" + + " \"netflow.source_ipv4_address\": {" + + " \"type\": \"ip\"" + + " }," + + " \"netflow.destination_transport_port\": {" + + " \"type\": \"integer\"" + + " }," + + " \"netflow.destination_ipv4_address\": {" + + " \"type\": \"ip\"" + + " }," + + " \"netflow.source_transport_port\": {" + + " \"type\": \"integer\"" + + " }," + + " \"netflow.event.stop\": {" + + " \"type\": \"integer\"" + + " }," + + " \"dns.event.stop\": {" + + " \"type\": \"integer\"" + + " }," + + " \"ipx.event.stop\": {" + + " \"type\": \"integer\"" + + " }," + + " \"plain1\": {" + + " \"type\": \"integer\"" + + " }," + + " \"user\":{" + + " \"type\":\"nested\"," + + " \"properties\":{" + + " \"first\":{" + + " \"type\":\"text\"," + + " \"fields\":{" + + " \"keyword\":{" + + " \"type\":\"keyword\"," + + " \"ignore_above\":256" + + "}" + + "}" + + "}," + + " \"last\":{" + + "\"type\":\"text\"," + + "\"fields\":{" + + " \"keyword\":{" + + " \"type\":\"keyword\"," + + " \"ignore_above\":256" + + "}" + + "}" + + "}" + + "}" + + "}" + + " }"; + + createIndex(indexName, settings, indexMapping, aliases); + + // Insert sample doc + String sampleDoc = "{" + + " \"netflow.source_ipv4_address\":\"10.50.221.10\"," + + " \"netflow.destination_transport_port\":1234," + + " \"netflow.destination_ipv4_address\":\"10.53.111.14\"," + + " \"netflow.source_transport_port\":4444" + + "}"; + + // Index doc + Request indexRequest = new Request("POST", indexName + "/_doc?refresh=wait_for"); + indexRequest.setJsonEntity(sampleDoc); + Response response = client().performRequest(indexRequest); + assertEquals(HttpStatus.SC_CREATED, response.getStatusLine().getStatusCode()); + // Refresh everything + response = client().performRequest(new Request("POST", "_refresh")); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + } + + private void createSampleDatastream(String datastreamName) throws IOException { + String indexMapping = + " \"properties\": {" + + " \"@timestamp\": {" + + " \"type\": \"date\"" + + " }," + + " \"netflow.source_ipv4_address\": {" + + " \"type\": \"ip\"" + + " }," + + " \"netflow.destination_transport_port\": {" + + " \"type\": \"integer\"" + + " }," + + " \"netflow.destination_ipv4_address\": {" + + " \"type\": \"ip\"" + + " }," + + " \"netflow.source_transport_port\": {" + + " \"type\": \"integer\"" + + " }," + + " \"netflow.event.stop\": {" + + " \"type\": \"integer\"" + + " }," + + " \"dns.event.stop\": {" + + " \"type\": \"integer\"" + + " }," + + " \"ipx.event.stop\": {" + + " \"type\": \"integer\"" + + " }," + + " \"plain1\": {" + + " \"type\": \"integer\"" + + " }," + + " \"user\":{" + + " \"type\":\"nested\"," + + " \"properties\":{" + + " \"first\":{" + + " \"type\":\"text\"," + + " \"fields\":{" + + " \"keyword\":{" + + " \"type\":\"keyword\"," + + " \"ignore_above\":256" + + "}" + + "}" + + "}," + + " \"last\":{" + + "\"type\":\"text\"," + + "\"fields\":{" + + " \"keyword\":{" + + " \"type\":\"keyword\"," + + " \"ignore_above\":256" + + "}" + + "}" + + "}" + + "}" + + "}" + + " }"; + + + // Create index template + String indexTemplateRequest = "{\n" + + " \"index_patterns\": [\"" + datastreamName + "*\"],\n" + + " \"data_stream\": { },\n" + + " \"template\": {\n" + + " \"mappings\" : {" + indexMapping + "}\n" + + " }," + + " \"priority\": 500\n" + + "}"; + + + Response response = makeRequest(client(), "PUT", "_index_template/" + datastreamName + "-template", Collections.emptyMap(), + new StringEntity(indexTemplateRequest), new BasicHeader("Content-Type", "application/json")); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + // Insert sample doc + String sampleDoc = "{" + + " \"@timestamp\":\"2023-05-06T16:21:15.000Z\"," + + " \"netflow.source_ipv4_address\":\"10.50.221.10\"," + + " \"netflow.destination_transport_port\":1234," + + " \"netflow.destination_ipv4_address\":\"10.53.111.14\"," + + " \"netflow.source_transport_port\":4444" + + "}"; + + // Index doc + Request indexRequest = new Request("POST", datastreamName + "/_doc?refresh=wait_for"); + indexRequest.setJsonEntity(sampleDoc); + response = client().performRequest(indexRequest); + assertEquals(HttpStatus.SC_CREATED, response.getStatusLine().getStatusCode()); + // Refresh everything + response = client().performRequest(new Request("POST", "_refresh")); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + } + + private void deleteDatastream(String datastreamName) throws IOException { + Request indexRequest = new Request("DELETE", "_data_stream/" + datastreamName); + Response response = client().performRequest(indexRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + } + + private final String DNS_SAMPLE = "dns-sample.json"; + private final String CLOUDTRAIL_SAMPLE = "cloudtrail-sample.json"; + private final String CLOUDTRAIL_SAMPLE_S3 = "s3-sample.json"; + + + private final String DNS_MAPPINGS = "OSMapping/dns/mappings.json"; + private final String CLOUDTRAIL_MAPPINGS = "OSMapping/cloudtrail/mappings.json"; + private final String S3_MAPPINGS = "OSMapping/s3/mappings.json"; + + private final String NETWORK_MAPPINGS = "OSMapping/network/mappings.json"; + private final String LINUX_MAPPINGS = "OSMapping/linux/mappings.json"; + private final String WINDOWS_MAPPINGS = "OSMapping/windows/mappings.json"; + private final String APACHE_ACCESS_MAPPINGS = "OSMapping/apache_access/mappings.json"; + private final String AD_LDAP_MAPPINGS = "OSMapping/ad_ldap/mappings.json"; + + private String readResource(String name) throws IOException { + try (InputStream inputStream = SecurityAnalyticsPlugin.class.getClassLoader().getResourceAsStream(name)) { + if (inputStream == null) { + throw new IOException("Resource not found: " + name); + } + try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream))) { + return reader.lines().collect(Collectors.joining("\n")); + } + } + } + + public void testReadResource() throws IOException { + String content = readResource(DNS_MAPPINGS); + assertTrue(content.contains("properties")); + } + + public void testCreateDNSMapping() throws IOException{ + String INDEX_NAME = "test_create_cloudtrail_mapping_index"; + + createSampleIndex(INDEX_NAME); + // Sample dns document + String dnsSampleDoc = readResource(DNS_SAMPLE); + // Index doc + Request indexRequest = new Request("POST", INDEX_NAME + "/_doc?refresh=wait_for"); + indexRequest.setJsonEntity(dnsSampleDoc); + //Generate automatic mappings my inserting doc + Response response = client().performRequest(indexRequest); + //Get the mappings being tested + String indexMapping = readResource(DNS_MAPPINGS); + //Parse the mappings + XContentParser parser = JsonXContent.jsonXContent + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + indexMapping); + Map mappings = (Map) parser.map().get("properties"); + GetMappingsResponse getMappingsResponse = SecurityAnalyticsClientUtils.executeGetMappingsRequest(INDEX_NAME); + + MappingsTraverser mappingsTraverser = new MappingsTraverser(getMappingsResponse.getMappings().iterator().next().value); + List flatProperties = mappingsTraverser.extractFlatNonAliasFields(); + assertTrue(flatProperties.contains("dns.answers.type")); + assertTrue(flatProperties.contains("dns.question.name")); + assertTrue(flatProperties.contains("dns.question.registered_domain")); + + //Loop over the mappings and run update request for each one specifying the index to be updated + mappings.entrySet().forEach(entry -> { + String key = entry.getKey(); + if("timestamp".equals(key)) + return; + String path = ((Map) entry.getValue()).get("path").toString(); + try { + Request updateRequest = new Request("PUT", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + updateRequest.setJsonEntity(Strings.toString(XContentFactory.jsonBuilder().map(Map.of( + "index_name", INDEX_NAME, + "field", path, + "alias", key)))); + Response apiResponse = client().performRequest(updateRequest); + assertEquals(HttpStatus.SC_OK, apiResponse.getStatusLine().getStatusCode()); + + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + + // Refresh everything + response = client().performRequest(new Request("POST", "_refresh")); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + } + + + public void testTraverseAndCopy() { + + try { + String indexName = "my_test_index"; + + String indexMappingJSON = "" + + " \"properties\": {" + + " \"netflow.event_data.SourceAddress\": {" + + " \"type\": \"ip\"" + + " }," + + " \"type\": {" + + " \"type\": \"integer\"" + + " }," + + " \"netflow.event_data.DestinationPort\": {" + + " \"type\": \"integer\"" + + " }," + + " \"netflow.event.stop\": {" + + " \"type\": \"integer\"" + + " }," + + " \"netflow.event.start\": {" + + " \"type\": \"long\"" + + " }," + + " \"plain1\": {" + + " \"type\": \"integer\"" + + " }," + + " \"user\":{" + + " \"type\":\"nested\"," + + " \"properties\":{" + + " \"first\":{" + + " \"type\":\"long\"" + + " }," + + " \"last\":{" + + " \"type\":\"text\"," + + " \"fields\":{" + + " \"keyword\":{" + + " \"type\":\"keyword\"," + + " \"ignore_above\":256" + + " }" + + " }" + + " }" + + " }" + + " }" + + "}"; + + createIndex(indexName, Settings.EMPTY, indexMappingJSON); + + Map mappings = getIndexMappingsAPI(indexName); + + MappingsTraverser mappingsTraverser; + + mappingsTraverser = new MappingsTraverser(mappings, Set.of()); + + // Copy specific paths from mappings + Map filteredMappings = mappingsTraverser.traverseAndCopyWithFilter( + Set.of("netflow.event_data.SourceAddress", "netflow.event.stop", "plain1", "user.first", "user.last") + ); + + // Now traverse filtered mapppings to confirm only copied paths are present + List paths = new ArrayList<>(); + mappingsTraverser = new MappingsTraverser(filteredMappings, Set.of()); + mappingsTraverser.addListener(new MappingsTraverser.MappingsTraverserListener() { + @Override + public void onLeafVisited(MappingsTraverser.Node node) { + paths.add(node.currentPath); + } + + @Override + public void onError(String error) { + fail("Failed traversing valid mappings"); + } + }); + mappingsTraverser.traverse(); + assertEquals(5, paths.size()); + assertTrue(paths.contains("user.first")); + assertTrue(paths.contains("user.last")); + assertTrue(paths.contains("plain1")); + assertTrue(paths.contains("netflow.event.stop")); + assertTrue(paths.contains("netflow.event_data.SourceAddress")); + + } catch (IOException e) { + fail("Error instantiating MappingsTraverser with JSON string as mappings"); + } + } + + public void testAzureMappings() throws IOException { + + String indexName = "azure-test-index"; + String sampleDoc = readResource("azure-sample.json"); + + createIndex(indexName, Settings.EMPTY); + + indexDoc(indexName, "1", sampleDoc); + + createMappingsAPI(indexName, Detector.DetectorType.AZURE.getDetectorType()); + + //Expect only "timestamp" alias to be applied + Map mappings = getIndexMappingsSAFlat(indexName); + assertTrue(mappings.containsKey("timestamp")); + + // Verify that all rules are working + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of(indexName), List.of(), + getPrePackagedRules(Detector.DetectorType.AZURE.getDetectorType()).stream().map(DetectorRule::new).collect(Collectors.toList())); + Detector detector = randomDetectorWithInputs(List.of(input), Detector.DetectorType.AZURE); + createDetector(detector); + + String request = "{\n" + + " \"size\": 1000, " + + " \"query\" : {\n" + + " \"match_all\":{}\n" + + " }\n" + + "}"; + List hits = executeSearch(".opensearch-sap-azure-detectors-queries-000001", request); + Assert.assertEquals(60, hits.size()); + } + + public void testADLDAPMappings() throws IOException { + + String indexName = "adldap-test-index"; + String sampleDoc = readResource("ad_ldap-sample.json"); + + createIndex(indexName, Settings.EMPTY); + + indexDoc(indexName, "1", sampleDoc); + + createMappingsAPI(indexName, Detector.DetectorType.AD_LDAP.getDetectorType()); + + //Expect only "timestamp" alias to be applied + Map mappings = getIndexMappingsSAFlat(indexName); + assertTrue(mappings.containsKey("timestamp")); + + // Verify that all rules are working + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of(indexName), List.of(), + getPrePackagedRules(Detector.DetectorType.AD_LDAP.getDetectorType()).stream().map(DetectorRule::new).collect(Collectors.toList())); + Detector detector = randomDetectorWithInputs(List.of(input), Detector.DetectorType.AD_LDAP); + createDetector(detector); + + String request = "{\n" + + " \"size\": 1000, " + + " \"query\" : {\n" + + " \"match_all\":{}\n" + + " }\n" + + "}"; + List hits = executeSearch(".opensearch-sap-ad_ldap-detectors-queries-000001", request); + Assert.assertEquals(11, hits.size()); + } + + public void testCloudtrailMappings() throws IOException { + + String indexName = "cloudtrail-test-index"; + String sampleDoc = readResource("cloudtrail-sample.json"); + + createIndex(indexName, Settings.EMPTY); + + indexDoc(indexName, "1", sampleDoc); + + createMappingsAPI(indexName, Detector.DetectorType.CLOUDTRAIL.getDetectorType()); + + //Expect only "timestamp" alias to be applied + Map mappings = getIndexMappingsSAFlat(indexName); + assertTrue(mappings.containsKey("timestamp")); + + // Verify that all rules are working + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of(indexName), List.of(), + getPrePackagedRules(Detector.DetectorType.CLOUDTRAIL.getDetectorType()).stream().map(DetectorRule::new).collect(Collectors.toList())); + Detector detector = randomDetectorWithInputs(List.of(input), Detector.DetectorType.CLOUDTRAIL); + createDetector(detector); + + String request = "{\n" + + " \"size\": 1000, " + + " \"query\" : {\n" + + " \"match_all\":{}\n" + + " }\n" + + "}"; + List hits = executeSearch(".opensearch-sap-cloudtrail-detectors-queries-000001", request); + Assert.assertEquals(31, hits.size()); + } + + public void testS3Mappings() throws IOException { + + String indexName = "s3-test-index"; + String sampleDoc = readResource("s3-sample.json"); + + createIndex(indexName, Settings.EMPTY); + + indexDoc(indexName, "1", sampleDoc); + + createMappingsAPI(indexName, Detector.DetectorType.S3.getDetectorType()); + + //Expect only "timestamp" alias to be applied + Map mappings = getIndexMappingsSAFlat(indexName); + assertTrue(mappings.containsKey("timestamp")); + assertTrue(mappings.containsKey("Requester")); + + // Verify that all rules are working + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of(indexName), List.of(), + getPrePackagedRules(Detector.DetectorType.S3.getDetectorType()).stream().map(DetectorRule::new).collect(Collectors.toList())); + Detector detector = randomDetectorWithInputs(List.of(input), Detector.DetectorType.S3); + createDetector(detector); + + String request = "{\n" + + " \"size\": 1000, " + + " \"query\" : {\n" + + " \"match_all\":{}\n" + + " }\n" + + "}"; + List hits = executeSearch(".opensearch-sap-s3-detectors-queries-000001", request); + Assert.assertEquals(1, hits.size()); + } +} diff --git a/src/test/java/org/opensearch/securityanalytics/mapper/MapperServiceTests.java b/src/test/java/org/opensearch/securityanalytics/mapper/MapperServiceTests.java index e98486fdd..6bb84dc89 100644 --- a/src/test/java/org/opensearch/securityanalytics/mapper/MapperServiceTests.java +++ b/src/test/java/org/opensearch/securityanalytics/mapper/MapperServiceTests.java @@ -14,6 +14,7 @@ import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.securityanalytics.action.GetMappingsViewResponse; +import org.opensearch.securityanalytics.util.SecurityAnalyticsException; import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; @@ -27,82 +28,84 @@ public class MapperServiceTests extends OpenSearchTestCase { - public void testCreateMappingAction_pathIsNull() throws IOException { - MapperTopicStore.putAliasMappings("test", "testMissingPath.json"); - - MapperService mapperService = spy(MapperService.class); - IndicesAdminClient client = mock(IndicesAdminClient.class); - mapperService.setIndicesAdminClient(client); - // Create fake GetIndexMappingsResponse - ImmutableOpenMap.Builder mappings = ImmutableOpenMap.builder(); - Map m = new HashMap<>(); - m.put("netflow.event_data.SourceAddress", Map.of("type", "ip")); - m.put("netflow.event_data.SourcePort", Map.of("type", "integer")); - Map properties = Map.of("properties", m); - Map root = Map.of(org.opensearch.index.mapper.MapperService.SINGLE_MAPPING_NAME, properties); - MappingMetadata mappingMetadata = new MappingMetadata(org.opensearch.index.mapper.MapperService.SINGLE_MAPPING_NAME, root); - mappings.put("my_index", mappingMetadata); - GetMappingsResponse getMappingsResponse = new GetMappingsResponse(mappings.build()); - // Setup getMappings interceptor and return fake GetMappingsResponse by calling listener.onResponse - doAnswer(invocation -> { - ActionListener l = invocation.getArgument(1); - l.onResponse(getMappingsResponse); - return null; - }).when(client).getMappings(any(GetMappingsRequest.class), any(ActionListener.class)); - - // Call CreateMappingAction - mapperService.createMappingAction("my_index", "test", false, new ActionListener() { - @Override - public void onResponse(AcknowledgedResponse acknowledgedResponse) { - - } - - @Override - public void onFailure(Exception e) { - assertTrue(e.getMessage().equals("Alias mappings are missing path for alias: [srcport]")); - } - }); - } - - public void testCreateMappingAction_multipleAliasesWithSameName() { - // We expect JSON parser to throw "duplicate fields" error - - // Setup - MapperTopicStore.putAliasMappings("test1", "testMultipleAliasesWithSameName.json"); - MapperService mapperService = spy(MapperService.class); - IndicesAdminClient client = mock(IndicesAdminClient.class); - mapperService.setIndicesAdminClient(client); - // Create fake GetIndexMappingsResponse - ImmutableOpenMap.Builder mappings = ImmutableOpenMap.builder(); - Map m = new HashMap<>(); - - m.put("netflow.event_data.SourceAddress", Map.of("type", "ip")); - m.put("netflow.event_data.DestinationPort", Map.of("type", "integer")); - Map properties = Map.of("properties", m); - Map root = Map.of(org.opensearch.index.mapper.MapperService.SINGLE_MAPPING_NAME, properties); - MappingMetadata mappingMetadata = new MappingMetadata(org.opensearch.index.mapper.MapperService.SINGLE_MAPPING_NAME, root); - mappings.put("my_index", mappingMetadata); - GetMappingsResponse getMappingsResponse = new GetMappingsResponse(mappings.build()); - // Setup getMappings interceptor and return fake GetMappingsResponse by calling listener.onResponse - doAnswer(invocation -> { - ActionListener l = invocation.getArgument(1); - l.onResponse(getMappingsResponse); - return null; - }).when(client).getMappings(any(GetMappingsRequest.class), any(ActionListener.class)); - - // Call CreateMappingAction - mapperService.createMappingAction("my_index", "test1", false, new ActionListener() { - @Override - public void onResponse(AcknowledgedResponse acknowledgedResponse) { - - } - - @Override - public void onFailure(Exception e) { - assertTrue(e.getMessage().contains("Duplicate field 'srcaddr'")); - } - }); - } +// public void testCreateMappingAction_pathIsNull() throws IOException { +// MapperTopicStore.putAliasMappings("test", "testMissingPath.json"); +// +// MapperService mapperService = spy(MapperService.class); +// IndicesAdminClient client = mock(IndicesAdminClient.class); +// mapperService.setIndicesAdminClient(client); +// // Create fake GetIndexMappingsResponse +// ImmutableOpenMap.Builder mappings = ImmutableOpenMap.builder(); +// Map m = new HashMap<>(); +// m.put("netflow.event_data.SourceAddress", Map.of("type", "ip")); +// m.put("netflow.event_data.SourcePort", Map.of("type", "integer")); +// Map properties = Map.of("properties", m); +// Map root = Map.of(org.opensearch.index.mapper.MapperService.SINGLE_MAPPING_NAME, properties); +// MappingMetadata mappingMetadata = new MappingMetadata(org.opensearch.index.mapper.MapperService.SINGLE_MAPPING_NAME, root); +// mappings.put("my_index", mappingMetadata); +// GetMappingsResponse getMappingsResponse = new GetMappingsResponse(mappings.build()); +// // Setup getMappings interceptor and return fake GetMappingsResponse by calling listener.onResponse +// doAnswer(invocation -> { +// ActionListener l = invocation.getArgument(1); +// l.onResponse(getMappingsResponse); +// return null; +// }).when(client).getMappings(any(GetMappingsRequest.class), any(ActionListener.class)); +// +// // Call CreateMappingAction +// mapperService.createMappingAction("my_index", "test", false, new ActionListener() { +// @Override +// public void onResponse(AcknowledgedResponse acknowledgedResponse) { +// +// } +// +// @Override +// public void onFailure(Exception e) { +// assertTrue(e instanceof SecurityAnalyticsException); +// assertTrue(e.getCause().getMessage().equals("Alias mappings are missing path for alias: [srcport]")); +// } +// }); +// } +// +// public void testCreateMappingAction_multipleAliasesWithSameName() { +// // We expect JSON parser to throw "duplicate fields" error +// +// // Setup +// MapperTopicStore.putAliasMappings("test1", "testMultipleAliasesWithSameName.json"); +// MapperService mapperService = spy(MapperService.class); +// IndicesAdminClient client = mock(IndicesAdminClient.class); +// mapperService.setIndicesAdminClient(client); +// // Create fake GetIndexMappingsResponse +// ImmutableOpenMap.Builder mappings = ImmutableOpenMap.builder(); +// Map m = new HashMap<>(); +// +// m.put("netflow.event_data.SourceAddress", Map.of("type", "ip")); +// m.put("netflow.event_data.DestinationPort", Map.of("type", "integer")); +// Map properties = Map.of("properties", m); +// Map root = Map.of(org.opensearch.index.mapper.MapperService.SINGLE_MAPPING_NAME, properties); +// MappingMetadata mappingMetadata = new MappingMetadata(org.opensearch.index.mapper.MapperService.SINGLE_MAPPING_NAME, root); +// mappings.put("my_index", mappingMetadata); +// GetMappingsResponse getMappingsResponse = new GetMappingsResponse(mappings.build()); +// // Setup getMappings interceptor and return fake GetMappingsResponse by calling listener.onResponse +// doAnswer(invocation -> { +// ActionListener l = invocation.getArgument(1); +// l.onResponse(getMappingsResponse); +// return null; +// }).when(client).getMappings(any(GetMappingsRequest.class), any(ActionListener.class)); +// +// // Call CreateMappingAction +// mapperService.createMappingAction("my_index", "test1", false, new ActionListener() { +// @Override +// public void onResponse(AcknowledgedResponse acknowledgedResponse) { +// +// } +// +// @Override +// public void onFailure(Exception e) { +// assertTrue(e instanceof SecurityAnalyticsException); +// assertTrue(e.getCause().getMessage().contains("Duplicate field 'srcaddr'")); +// } +// }); +// } public void testGetMappingsView_successAliasesOnlyReturned() { // We expect JSON parser to throw "duplicate fields" error diff --git a/src/test/java/org/opensearch/securityanalytics/mapper/MapperUtilsTests.java b/src/test/java/org/opensearch/securityanalytics/mapper/MapperUtilsTests.java index f9af17e1d..1cf648bf5 100644 --- a/src/test/java/org/opensearch/securityanalytics/mapper/MapperUtilsTests.java +++ b/src/test/java/org/opensearch/securityanalytics/mapper/MapperUtilsTests.java @@ -13,6 +13,7 @@ import java.io.IOException; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; public class MapperUtilsTests extends OpenSearchTestCase { @@ -31,7 +32,7 @@ public void testValidateIndexMappingsMissingSome() throws IOException { MappingMetadata mappingMetadata = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, root); mappings.put("my_index", mappingMetadata); - List missingFields = MapperUtils.validateIndexMappings(mappings.build(), MapperTopicStore.aliasMappings("test123")); + List missingFields = MapperUtils.validateIndexMappings("my_index", mappingMetadata, MapperTopicStore.aliasMappings("test123")).getLeft(); assertEquals(3, missingFields.size()); } @@ -44,8 +45,8 @@ public void testValidateIndexMappingsEmptyMappings() throws IOException { MappingMetadata mappingMetadata = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, root); mappings.put("my_index", mappingMetadata); - IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> MapperUtils.validateIndexMappings(mappings.build(), MapperTopicStore.aliasMappings("test123"))); - assertTrue(e.getMessage().contains("Index mappings are empty")); + IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> MapperUtils.validateIndexMappings("my_index", mappingMetadata, MapperTopicStore.aliasMappings("test123"))); + assertTrue(e.getMessage().contains("Mappings for index [my_index] are empty")); } public void testValidateIndexMappingsNoMissing() throws IOException { @@ -60,10 +61,24 @@ public void testValidateIndexMappingsNoMissing() throws IOException { MappingMetadata mappingMetadata = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, root); mappings.put("my_index", mappingMetadata); - List missingFields = MapperUtils.validateIndexMappings(mappings.build(), MapperTopicStore.aliasMappings("test123")); + List missingFields = MapperUtils.validateIndexMappings("my_index", mappingMetadata, MapperTopicStore.aliasMappings("test123")).getLeft(); assertEquals(0, missingFields.size()); } + public void testGetAllNonAliasFieldsFromIndex_success() throws IOException { + // Create index mappings + Map m = new HashMap<>(); + m.put("netflow.event_data.SourceAddress", Map.of("type", "ip")); + m.put("alias_123", Map.of("type", "alias", "path", "netflow.event_data.SourceAddress")); + Map properties = Map.of("properties", m); + Map root = Map.of(MapperService.SINGLE_MAPPING_NAME, properties); + MappingMetadata mappingMetadata = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, root); + + List fields = MapperUtils.getAllNonAliasFieldsFromIndex(mappingMetadata); + assertEquals(1, fields.size()); + assertEquals("netflow.event_data.SourceAddress", fields.get(0)); + } + public void testGetAllPathsFromAliasMappingsSuccess() throws IOException { MapperTopicStore.putAliasMappings("test123", "testValidAliasMappingsSimple.json"); diff --git a/src/test/java/org/opensearch/securityanalytics/mapper/MappingsTraverserTests.java b/src/test/java/org/opensearch/securityanalytics/mapper/MappingsTraverserTests.java index 627e24c69..811c65d75 100644 --- a/src/test/java/org/opensearch/securityanalytics/mapper/MappingsTraverserTests.java +++ b/src/test/java/org/opensearch/securityanalytics/mapper/MappingsTraverserTests.java @@ -19,10 +19,6 @@ public class MappingsTraverserTests extends OpenSearchTestCase { - - - - public void testTraverseValidMappings() { // 1. Parse mappings from MappingMetadata ImmutableOpenMap.Builder mappings = ImmutableOpenMap.builder(); @@ -136,30 +132,54 @@ public void onError(String error) { public void testTraverseInvalidMappings() { // 1. Parse mappings from MappingMetadata - ImmutableOpenMap.Builder mappings = ImmutableOpenMap.builder(); + Map mappings = new HashMap<>(); Map m = new HashMap<>(); m.put("netflow.event_data.SourceAddress", Map.of("type", "ip")); m.put("netflow.event_data.SourcePort", Map.of("type", "integer")); Map properties = Map.of("incorrect_properties", m); Map root = Map.of(MapperService.SINGLE_MAPPING_NAME, properties); MappingMetadata mappingMetadata = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, root); - mappings.put("my_index", mappingMetadata); MappingsTraverser mappingsTraverser = new MappingsTraverser(mappingMetadata); - final boolean[] errorHappend = new boolean[1]; + List paths = new ArrayList<>(); mappingsTraverser.addListener(new MappingsTraverser.MappingsTraverserListener() { @Override public void onLeafVisited(MappingsTraverser.Node node) { assertNotNull(node); + paths.add(node.currentPath); } @Override public void onError(String error) { - errorHappend[0] = true; + fail("Failed traversing invalid mappings"); } }); mappingsTraverser.traverse(); - assertTrue(errorHappend[0]); + assertEquals(0, paths.size()); + } + + public void testTraverseEmptyMappings() { + Map root = Map.of(MapperService.SINGLE_MAPPING_NAME, new HashMap<>()); + MappingMetadata mappingMetadata = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, root); + + MappingsTraverser mappingsTraverser = new MappingsTraverser(mappingMetadata); + final boolean[] errorHappend = new boolean[1]; + List paths = new ArrayList<>(); + + mappingsTraverser.addListener(new MappingsTraverser.MappingsTraverserListener() { + @Override + public void onLeafVisited(MappingsTraverser.Node node) { + assertNotNull(node); + paths.add(node.currentPath); + } + + @Override + public void onError(String error) { + fail("Failed traversing empty mappings"); + } + }); + mappingsTraverser.traverse(); + assertEquals(0, paths.size()); } public void testTraverseValidMappingsWithTypeFilter() { @@ -205,7 +225,7 @@ public void testTraverseAndCopyValidMappingsWithTypeFilter() { MappingsTraverser mappingsTraverser = new MappingsTraverser(properties, Set.of("ip")); // Copy mappings while excluding type=ip - Map filteredMappings = mappingsTraverser.traverseAndShallowCopy(); + Map filteredMappings = mappingsTraverser.traverseAndCopyAsFlat(); // Now traverse filtered mapppings to confirm type=ip is not present List paths = new ArrayList<>(); mappingsTraverser = new MappingsTraverser(filteredMappings, Set.of()); @@ -225,7 +245,7 @@ public void onError(String error) { assertEquals("netflow.event_data.SourcePort", paths.get(0)); } - public void testTraverseAndCopyValidNestedMappingsWithTypeFilter() { + public void testTraverseAndCopyAsFlatValidNestedMappingsWithTypeFilter() { String indexMappingJSON = "{" + " \"properties\": {" + " \"netflow.event_data.SourceAddress\": {" + @@ -284,7 +304,92 @@ public void testTraverseAndCopyValidNestedMappingsWithTypeFilter() { mappingsTraverser = new MappingsTraverser(indexMappingJSON, Set.of("ip")); // Copy mappings while excluding type=ip - Map filteredMappings = mappingsTraverser.traverseAndShallowCopy(); + Map filteredMappings = mappingsTraverser.traverseAndCopyAsFlat(); + + // Now traverse filtered mapppings to confirm type=ip is not present + List paths = new ArrayList<>(); + mappingsTraverser = new MappingsTraverser(filteredMappings, Set.of("integer")); + mappingsTraverser.addListener(new MappingsTraverser.MappingsTraverserListener() { + @Override + public void onLeafVisited(MappingsTraverser.Node node) { + paths.add(node.currentPath); + } + + @Override + public void onError(String error) { + fail("Failed traversing valid mappings"); + } + }); + mappingsTraverser.traverse(); + assertEquals(2, paths.size()); + assertEquals("user.last", paths.get(0)); + assertEquals("user.first", paths.get(1)); + + } catch (IOException e) { + fail("Error instantiating MappingsTraverser with JSON string as mappings"); + } + } + + public void testTraverseAndCopyValidNestedMappings() { + String indexMappingJSON = "{" + + " \"properties\": {" + + " \"netflow.event_data.SourceAddress\": {" + + " \"type\": \"ip\"" + + " }," + + " \"netflow.event_data.DestinationPort\": {" + + " \"type\": \"integer\"" + + " }," + + " \"netflow.event_data.DestAddress\": {" + + " \"type\": \"ip\"" + + " }," + + " \"netflow.event_data.SourcePort\": {" + + " \"type\": \"integer\"" + + " }," + + " \"netflow.event.stop\": {" + + " \"type\": \"integer\"" + + " }," + + " \"dns.event.stop\": {" + + " \"type\": \"integer\"" + + " }," + + " \"ipx.event.stop\": {" + + " \"type\": \"integer\"" + + " }," + + " \"plain1\": {" + + " \"type\": \"integer\"" + + " }," + + " \"user\":{" + + " \"type\":\"nested\"," + + " \"properties\":{" + + " \"first\":{" + + " \"type\":\"text\"," + + " \"fields\":{" + + " \"keyword\":{" + + " \"type\":\"keyword\"," + + " \"ignore_above\":256" + + "}" + + "}" + + "}," + + " \"last\":{" + + "\"type\":\"long\"," + + "\"fields\":{" + + " \"keyword\":{" + + " \"type\":\"keyword\"," + + " \"ignore_above\":256" + + "}" + + "}" + + "}" + + "}" + + "}" + + " }" + + "}"; + + MappingsTraverser mappingsTraverser; + try { + + mappingsTraverser = new MappingsTraverser(indexMappingJSON, Set.of("ip")); + + // Copy mappings while excluding type=ip + Map filteredMappings = mappingsTraverser.traverseAndCopyWithFilter(Set.of("user.first", "user.last")); // Now traverse filtered mapppings to confirm type=ip is not present List paths = new ArrayList<>(); @@ -302,8 +407,8 @@ public void onError(String error) { }); mappingsTraverser.traverse(); assertEquals(2, paths.size()); - assertEquals("user.first", paths.get(0)); - assertEquals("user.last", paths.get(1)); + assertEquals("user.last", paths.get(0)); + assertEquals("user.first", paths.get(1)); } catch (IOException e) { fail("Error instantiating MappingsTraverser with JSON string as mappings"); diff --git a/src/test/java/org/opensearch/securityanalytics/mapper/action/mapping/CreateIndexMappingsRequestTests.java b/src/test/java/org/opensearch/securityanalytics/mapper/action/mapping/CreateIndexMappingsRequestTests.java index 7bee983a1..f7cf63195 100644 --- a/src/test/java/org/opensearch/securityanalytics/mapper/action/mapping/CreateIndexMappingsRequestTests.java +++ b/src/test/java/org/opensearch/securityanalytics/mapper/action/mapping/CreateIndexMappingsRequestTests.java @@ -8,8 +8,8 @@ import org.opensearch.action.ActionRequestValidationException; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.securityanalytics.action.CreateIndexMappingsRequest; import org.opensearch.test.OpenSearchTestCase; diff --git a/src/test/java/org/opensearch/securityanalytics/mapper/action/mapping/GetIndexMappingsRequestTests.java b/src/test/java/org/opensearch/securityanalytics/mapper/action/mapping/GetIndexMappingsRequestTests.java index caaa0be4c..33c678f66 100644 --- a/src/test/java/org/opensearch/securityanalytics/mapper/action/mapping/GetIndexMappingsRequestTests.java +++ b/src/test/java/org/opensearch/securityanalytics/mapper/action/mapping/GetIndexMappingsRequestTests.java @@ -8,8 +8,8 @@ import org.opensearch.action.ActionRequestValidationException; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.securityanalytics.action.GetIndexMappingsRequest; import org.opensearch.test.OpenSearchTestCase; diff --git a/src/test/java/org/opensearch/securityanalytics/mapper/action/mapping/UpdateIndexMappingsRequestTests.java b/src/test/java/org/opensearch/securityanalytics/mapper/action/mapping/UpdateIndexMappingsRequestTests.java index b3cb97bc1..593ae7556 100644 --- a/src/test/java/org/opensearch/securityanalytics/mapper/action/mapping/UpdateIndexMappingsRequestTests.java +++ b/src/test/java/org/opensearch/securityanalytics/mapper/action/mapping/UpdateIndexMappingsRequestTests.java @@ -8,8 +8,8 @@ import org.opensearch.action.ActionRequestValidationException; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.securityanalytics.action.UpdateIndexMappingsRequest; import org.opensearch.test.OpenSearchTestCase; diff --git a/src/test/java/org/opensearch/securityanalytics/model/DetectorInputTests.java b/src/test/java/org/opensearch/securityanalytics/model/DetectorInputTests.java index 398cdfd3e..8ba5a991a 100644 --- a/src/test/java/org/opensearch/securityanalytics/model/DetectorInputTests.java +++ b/src/test/java/org/opensearch/securityanalytics/model/DetectorInputTests.java @@ -6,9 +6,9 @@ import org.junit.Assert; import org.opensearch.common.bytes.BytesReference; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; diff --git a/src/test/java/org/opensearch/securityanalytics/model/WriteableTests.java b/src/test/java/org/opensearch/securityanalytics/model/WriteableTests.java index 2326b541d..01f1cea70 100644 --- a/src/test/java/org/opensearch/securityanalytics/model/WriteableTests.java +++ b/src/test/java/org/opensearch/securityanalytics/model/WriteableTests.java @@ -13,15 +13,42 @@ import java.io.IOException; import java.util.List; +import static org.opensearch.securityanalytics.TestHelpers.parser; import static org.opensearch.securityanalytics.TestHelpers.randomDetector; import static org.opensearch.securityanalytics.TestHelpers.randomUser; import static org.opensearch.securityanalytics.TestHelpers.randomUserEmpty; +import static org.opensearch.securityanalytics.TestHelpers.toJsonStringWithUser; public class WriteableTests extends OpenSearchTestCase { public void testDetectorAsStream() throws IOException { Detector detector = randomDetector(List.of()); detector.setInputs(List.of(new DetectorInput("", List.of(), List.of(), List.of()))); + logger.error(toJsonStringWithUser(detector)); + BytesStreamOutput out = new BytesStreamOutput(); + detector.writeTo(out); + StreamInput sin = StreamInput.wrap(out.bytes().toBytesRef().bytes); + Detector newDetector = new Detector(sin); + Assert.assertEquals("Round tripping Detector doesn't work", detector, newDetector); + } + + public void testDetector() throws IOException { // an edge case of detector serialization that failed testDetectorAsAStream() intermittently + String detectorString = "{\"type\":\"detector\",\"name\":\"MczAuRCrve\",\"detector_type\":\"test_windows\"," + + "\"user\":{\"name\":\"QhKrfthgxw\",\"backend_roles\":[\"uYvGLCPhfX\",\"fOLkcRxMWR\"],\"roles\"" + + ":[\"YuucNpVzTm\",\"all_access\"],\"custom_attribute_names\":[\"test_attr=test\"]," + + "\"user_requested_tenant\":null},\"threat_intel_enabled\":false,\"enabled\":false,\"enabled_time\"" + + ":null,\"schedule\":{\"period\":{\"interval\":5,\"unit\":\"MINUTES\"}},\"inputs\":[{\"detector_input\"" + + ":{\"description\":\"\",\"indices\":[],\"custom_rules\":[],\"pre_packaged_rules\":[]}}],\"triggers\"" + + ":[{\"id\":\"SiWfaosBBiNA8if0E1bC\",\"name\":\"windows-trigger\",\"severity\":\"1\",\"types\"" + + ":[\"test_windows\"],\"ids\":[\"QuarksPwDump Clearing Access History\"],\"sev_levels\":[\"high\"]," + + "\"tags\":[\"T0008\"],\"actions\":[],\"detection_types\":[\"rules\"]}],\"last_update_time\":" + + "1698300892093,\"monitor_id\":[\"\"],\"workflow_ids\":[],\"bucket_monitor_id_rule_id\"" + + ":{},\"rule_topic_index\":\"\",\"alert_index\":\"\",\"alert_history_index\":\"\"," + + "\"alert_history_index_pattern\":\"\",\"findings_index\":\"\",\"findings_index_pattern\":\"\"}"; + Detector detector = Detector.parse(parser(detectorString), null, null); +// Detector detector = randomDetector(List.of()); +// detector.setInputs(List.of(new DetectorInput("", List.of(), List.of(), List.of()))); +// logger.error(toJsonStringWithUser(detector)); BytesStreamOutput out = new BytesStreamOutput(); detector.writeTo(out); StreamInput sin = StreamInput.wrap(out.bytes().toBytesRef().bytes); diff --git a/src/test/java/org/opensearch/securityanalytics/model/XContentTests.java b/src/test/java/org/opensearch/securityanalytics/model/XContentTests.java index ce1ae3806..6300740f7 100644 --- a/src/test/java/org/opensearch/securityanalytics/model/XContentTests.java +++ b/src/test/java/org/opensearch/securityanalytics/model/XContentTests.java @@ -8,8 +8,8 @@ import java.util.List; import org.junit.Assert; import org.opensearch.common.bytes.BytesReference; -import org.opensearch.common.xcontent.ToXContent; import org.opensearch.commons.authuser.User; +import org.opensearch.core.xcontent.ToXContent; import org.opensearch.test.OpenSearchTestCase; diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java new file mode 100644 index 000000000..d5d86a890 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorMonitorRestApiIT.java @@ -0,0 +1,1238 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.resthandler; + +import static org.opensearch.securityanalytics.TestHelpers.randomAggregationRule; +import static org.opensearch.securityanalytics.TestHelpers.randomDetector; +import static org.opensearch.securityanalytics.TestHelpers.randomDetectorType; +import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputs; +import static org.opensearch.securityanalytics.TestHelpers.randomDoc; +import static org.opensearch.securityanalytics.TestHelpers.randomIndex; +import static org.opensearch.securityanalytics.TestHelpers.randomRule; +import static org.opensearch.securityanalytics.TestHelpers.windowsIndexMapping; +import static org.opensearch.securityanalytics.TestHelpers.randomDocOnlyNumericAndText; +import static org.opensearch.securityanalytics.TestHelpers.randomRuleWithDateKeywords; +import static org.opensearch.securityanalytics.TestHelpers.randomRuleWithKeywords; +import static org.opensearch.securityanalytics.TestHelpers.randomRuleWithStringKeywords; +import static org.opensearch.securityanalytics.TestHelpers.windowsIndexMappingOnlyNumericAndDate; +import static org.opensearch.securityanalytics.TestHelpers.randomDocOnlyNumericAndDate; +import static org.opensearch.securityanalytics.TestHelpers.windowsIndexMappingOnlyNumericAndText; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import org.apache.http.HttpStatus; +import org.junit.Assert; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.client.Request; +import org.opensearch.client.Response; +import org.opensearch.commons.alerting.model.Monitor.MonitorType; +import org.opensearch.rest.RestStatus; +import org.opensearch.search.SearchHit; +import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; +import org.opensearch.securityanalytics.SecurityAnalyticsRestTestCase; +import org.opensearch.securityanalytics.config.monitors.DetectorMonitorConfig; +import org.opensearch.securityanalytics.model.Detector; +import org.opensearch.securityanalytics.model.DetectorInput; +import org.opensearch.securityanalytics.model.DetectorRule; +import org.opensearch.securityanalytics.model.Rule; + +public class DetectorMonitorRestApiIT extends SecurityAnalyticsRestTestCase { + /** + * 1. Creates detector with 5 doc prepackaged level rules and one doc level monitor based on the given rules + * 2. Creates two aggregation rules and assigns to a detector, while removing 5 prepackaged rules + * 3. Verifies that two bucket level monitor exists + * 4. Verifies the findings + * @throws IOException + */ + public void testRemoveDocLevelRuleAddAggregationRules_verifyFindings_success() throws IOException { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + Detector detector = randomDetector(getRandomPrePackagedRules()); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + + assertEquals(5, response.getHits().getTotalHits().value); + + Map responseBody = asMap(createResponse); + String detectorId = responseBody.get("_id").toString(); + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + + // Verify that one document level monitor is created + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorAsMap = (Map) hit.getSourceAsMap().get("detector"); + List monitorIds = (List) (detectorAsMap).get("monitor_id"); + + assertEquals(1, monitorIds.size()); + + String monitorId = monitorIds.get(0); + String monitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId))).get("monitor")).get("monitor_type"); + + assertEquals(MonitorType.DOC_LEVEL_MONITOR.getValue(), monitorType); + + // Create aggregation rules + String sumRuleId = createRule(randomAggregationRule( "sum", " > 2")); + String avgTermRuleId = createRule(randomAggregationRule( "avg", " > 1")); + // Update detector and empty doc level rules so detector contains only one aggregation rule + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(sumRuleId), new DetectorRule(avgTermRuleId)), + Collections.emptyList()); + Detector updatedDetector = randomDetectorWithInputs(List.of(input)); + + Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(updatedDetector)); + + assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); + + Map updateResponseBody = asMap(updateResponse); + detectorId = updateResponseBody.get("_id").toString(); + + hits = executeSearch(Detector.DETECTORS_INDEX, request); + hit = hits.get(0); + detectorAsMap = (Map) hit.getSourceAsMap().get("detector"); + monitorIds = (List) (detectorAsMap).get("monitor_id"); + + assertEquals(2, monitorIds.size()); + + indexDoc(index, "1", randomDoc(2, 4, "Info")); + indexDoc(index, "2", randomDoc(3, 4, "Info")); + + // Execute two bucket level monitors + for(String id: monitorIds){ + monitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + id))).get("monitor")).get("monitor_type"); + Assert.assertEquals(MonitorType.BUCKET_LEVEL_MONITOR.getValue(), monitorType); + executeAlertingMonitor(id, Collections.emptyMap()); + } + // verify bucket level monitor findings + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + + assertNotNull(getFindingsBody); + assertEquals(2, getFindingsBody.get("total_findings")); + + List aggRuleIds = List.of(sumRuleId, avgTermRuleId); + + List> findings = (List)getFindingsBody.get("findings"); + for(Map finding : findings) { + Set aggRulesFinding = ((List>)finding.get("queries")).stream().map(it -> it.get("id").toString()).collect( + Collectors.toSet()); + // Bucket monitor finding will have one rule + String aggRuleId = aggRulesFinding.iterator().next(); + + assertTrue(aggRulesFinding.contains(aggRuleId)); + + List findingDocs = (List)finding.get("related_doc_ids"); + Assert.assertEquals(2, findingDocs.size()); + assertTrue(Arrays.asList("1", "2").containsAll(findingDocs)); + } + + String findingDetectorId = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); + assertEquals(detectorId, findingDetectorId); + + String findingIndex = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("index").toString(); + assertEquals(index, findingIndex); + } + + /** + * 1. Creates detector with 1 aggregation rule and one bucket level monitor based on the aggregation rule + * 2. Creates 5 prepackaged doc level rules and one custom doc level rule and removes the aggregation rule + * 3. Verifies that one doc level monitor exists + * 4. Verifies the findings + * @throws IOException + */ + public void testReplaceAggregationRuleWithDocRule_verifyFindings_success() throws IOException { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + String maxRuleId = createRule(randomAggregationRule( "max", " > 2")); + List detectorRules = List.of(new DetectorRule(maxRuleId)); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, + Collections.emptyList()); + Detector detector = randomDetectorWithInputs(List.of(input)); + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + String detectorId = responseBody.get("_id").toString(); + String request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(Rule.CUSTOM_RULES_INDEX, request, true); + + assertEquals(1, response.getHits().getTotalHits().value); + + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + // Verify that one bucket level monitor is created + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorAsMap = (Map) hit.getSourceAsMap().get("detector"); + String monitorId = ((List) (detectorAsMap).get("monitor_id")).get(0); + + String monitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId))).get("monitor")).get("monitor_type"); + + assertEquals(MonitorType.BUCKET_LEVEL_MONITOR.getValue(), monitorType); + + // Create random doc rule and 5 pre-packed rules and assign to detector + String randomDocRuleId = createRule(randomRule()); + List prepackagedRules = getRandomPrePackagedRules(); + input = new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(randomDocRuleId)), + prepackagedRules.stream().map(DetectorRule::new).collect(Collectors.toList())); + Detector updatedDetector = randomDetectorWithInputs(List.of(input)); + + Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(updatedDetector)); + + assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); + + Map updateResponseBody = asMap(updateResponse); + detectorId = updateResponseBody.get("_id").toString(); + + // Verify newly created doc level monitor + hits = executeSearch(Detector.DETECTORS_INDEX, request); + hit = hits.get(0); + detectorAsMap = (Map) hit.getSourceAsMap().get("detector"); + List monitorIds = ((List) (detectorAsMap).get("monitor_id")); + + assertEquals(1, monitorIds.size()); + + monitorId = monitorIds.get(0); + monitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId))).get("monitor")).get("monitor_type"); + + assertEquals(MonitorType.DOC_LEVEL_MONITOR.getValue(), monitorType); + + // Verify rules + request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + + assertEquals(6, response.getHits().getTotalHits().value); + + // Verify findings + indexDoc(index, "1", randomDoc(2, 5, "Info")); + indexDoc(index, "2", randomDoc(3, 5, "Info")); + + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + // 5 prepackaged and 1 custom doc level rule + assertEquals(6, noOfSigmaRuleMatches); + + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + + assertNotNull(getFindingsBody); + // When doc level monitor is being applied one finding is generated per document + assertEquals(2, getFindingsBody.get("total_findings")); + + Set docRuleIds = new HashSet<>(prepackagedRules); + docRuleIds.add(randomDocRuleId); + + List> findings = (List)getFindingsBody.get("findings"); + List foundDocIds = new ArrayList<>(); + for(Map finding : findings) { + Set aggRulesFinding = ((List>)finding.get("queries")).stream().map(it -> it.get("id").toString()).collect( + Collectors.toSet()); + + assertTrue(docRuleIds.containsAll(aggRulesFinding)); + + List findingDocs = (List)finding.get("related_doc_ids"); + Assert.assertEquals(1, findingDocs.size()); + foundDocIds.addAll(findingDocs); + } + assertTrue(Arrays.asList("1", "2").containsAll(foundDocIds)); + } + + /** + * 1. Creates detector with prepackaged doc rules + * 2. Verifies that detector with doc level monitor is created + * 3. Removes all rules and updates detector + * 4. Verifies that detector doesn't have monitors attached + * + * @throws IOException + */ + public void testRemoveAllRulesAndUpdateDetector_success() throws IOException { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + List randomPrepackagedRules = getRandomPrePackagedRules(); + Detector detector = randomDetector(randomPrepackagedRules); + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + String detectorId = responseBody.get("_id").toString(); + String request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + + assertEquals(randomPrepackagedRules.size(), response.getHits().getTotalHits().value); + + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + // Verify that one doc level monitor is created + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorAsMap = (Map) hit.getSourceAsMap().get("detector"); + List monitorIds = ((List) (detectorAsMap).get("monitor_id")); + + assertEquals(1, monitorIds.size()); + + String monitorId = monitorIds.get(0); + String monitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId))).get("monitor")).get("monitor_type"); + + assertEquals(MonitorType.DOC_LEVEL_MONITOR.getValue(), monitorType); + + Detector updatedDetector = randomDetector(Collections.emptyList()); + Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(updatedDetector)); + + assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); + + Map updateResponseBody = asMap(updateResponse); + detectorId = updateResponseBody.get("_id").toString(); + + hits = executeSearch(Detector.DETECTORS_INDEX, request); + hit = hits.get(0); + detectorAsMap = (Map) hit.getSourceAsMap().get("detector"); + + assertTrue(((List) (detectorAsMap).get("monitor_id")).isEmpty()); + } + + /** + * 1. Creates detector with aggregation rule + * 2. Adds new aggregation rule + * 3. Updates a detector + * 4. Verifies that detector has 2 custom rules attached + * 5. Execute monitors and verifies findings + * + * @throws IOException + */ + public void testAddNewAggregationRule_verifyFindings_success() throws IOException { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + String sumRuleId = createRule(randomAggregationRule("sum", " > 1")); + List detectorRules = List.of(new DetectorRule(sumRuleId)); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, + Collections.emptyList()); + Detector detector = randomDetectorWithInputs(List.of(input)); + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + String detectorId = responseBody.get("_id").toString(); + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + List inputArr = detectorMap.get("inputs"); + + assertEquals(1, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); + + // Test adding the new max monitor and updating the existing sum monitor + String maxRuleId = createRule(randomAggregationRule("max", " > 3")); + DetectorInput newInput = new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(maxRuleId), new DetectorRule(sumRuleId)), + Collections.emptyList()); + Detector updatedDetector = randomDetectorWithInputs(List.of(newInput)); + Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(updatedDetector)); + + assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); + + hits = executeSearch(Detector.DETECTORS_INDEX, request); + hit = hits.get(0); + Map updatedDetectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + inputArr = updatedDetectorMap.get("inputs"); + + assertEquals(2, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); + + List monitorIds = ((List) (updatedDetectorMap).get("monitor_id")); + + assertEquals(2, monitorIds.size()); + + indexDoc(index, "1", randomDoc(2, 4, "Info")); + indexDoc(index, "2", randomDoc(3, 4, "Info")); + + for(String monitorId: monitorIds) { + Map monitor = (Map)(entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId)))).get("monitor"); + assertEquals(MonitorType.BUCKET_LEVEL_MONITOR.getValue(), monitor.get("monitor_type")); + executeAlertingMonitor(monitorId, Collections.emptyMap()); + } + + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + + assertNotNull(getFindingsBody); + // Two bucket monitors are executed and only one finding is generated since maxRule is not fulfilling the trigger condition + assertEquals(1, getFindingsBody.get("total_findings")); + + Map finding = ((List) getFindingsBody.get("findings")).get(0); + + Set aggRulesFinding = ((List>) finding.get("queries")).stream().map(it -> it.get("id").toString()).collect( + Collectors.toSet()); + + assertEquals(sumRuleId, aggRulesFinding.iterator().next()); + + List findingDocs = ((List) finding.get("related_doc_ids")); + + assertEquals(2, findingDocs.size()); + assertTrue(Arrays.asList("1", "2").containsAll(findingDocs)); + + String findingDetectorId = ((Map)((List) getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); + assertEquals(detectorId, findingDetectorId); + + String findingIndex = ((Map)((List) getFindingsBody.get("findings")).get(0)).get("index").toString(); + assertEquals(index, findingIndex); + } + + /** + * 1. Creates detector with 2 aggregation rule assigned + * 2. Verifies that 2 custom rules exists + * 3. Removes one rule and updates a detector + * 4. Verifies that detector has only one custom rule and one bucket level monitor + * + * @throws IOException + */ + public void testDeleteAggregationRule_verifyFindings_success() throws IOException { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + List aggRuleIds = new ArrayList<>(); + String avgRuleId = createRule(randomAggregationRule("avg", " > 1")); + aggRuleIds.add(avgRuleId); + String countRuleId = createRule(randomAggregationRule("count", " > 1")); + aggRuleIds.add(countRuleId); + + List detectorRules = aggRuleIds.stream().map(DetectorRule::new).collect(Collectors.toList()); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, + Collections.emptyList()); + Detector detector = randomDetectorWithInputs(List.of(input)); + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + String detectorId = responseBody.get("_id").toString(); + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + List inputArr = detectorMap.get("inputs"); + + assertEquals(2, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); + + // Test deleting the aggregation rule + DetectorInput newInput = new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(avgRuleId)), + Collections.emptyList()); + detector = randomDetectorWithInputs(List.of(newInput)); + Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(detector)); + + assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); + + hits = executeSearch(Detector.DETECTORS_INDEX, request); + hit = hits.get(0); + Map updatedDetectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + inputArr = updatedDetectorMap.get("inputs"); + + assertEquals(1, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); + + inputArr = updatedDetectorMap.get("inputs"); + + assertEquals(1, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); + + // Verify monitors + List monitorIds = ((List) (updatedDetectorMap).get("monitor_id")); + + assertEquals(1, monitorIds.size()); + + Map monitor = (Map)(entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorIds.get(0))))).get("monitor"); + + assertEquals(MonitorType.BUCKET_LEVEL_MONITOR.getValue(), monitor.get("monitor_type")); + + indexDoc(index, "1", randomDoc(2, 4, "Info")); + indexDoc(index, "2", randomDoc(3, 4, "Info")); + indexDoc(index, "3", randomDoc(3, 4, "Test")); + executeAlertingMonitor(monitorIds.get(0), Collections.emptyMap()); + + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + + assertNotNull(getFindingsBody); + + assertEquals(1, getFindingsBody.get("total_findings")); + + Map finding = ((List) getFindingsBody.get("findings")).get(0); + Set aggRulesFinding = ((List>) finding.get("queries")).stream().map(it -> it.get("id").toString()).collect( + Collectors.toSet()); + + assertEquals(avgRuleId, aggRulesFinding.iterator().next()); + + List findingDocs = (List) finding.get("related_doc_ids"); + // Matches two findings because of the opCode rule uses (Info) + assertEquals(2, findingDocs.size()); + assertTrue(Arrays.asList("1", "2").containsAll(findingDocs)); + + String findingDetectorId = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); + assertEquals(detectorId, findingDetectorId); + + String findingIndex = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("index").toString(); + assertEquals(index, findingIndex); + } + + /** + * 1. Creates detector with 2 aggregation and prepackaged doc level rules + * 2. Replaces one aggregation rule with a new one + * 3. Verifies that number of rules is unchanged + * 4. Verifies monitor types + * 5. Verifies findings + * @throws IOException + */ + public void testReplaceAggregationRule_verifyFindings_success() throws IOException { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + List aggRuleIds = new ArrayList<>(); + String avgRuleId = createRule(randomAggregationRule("avg", " > 1")); + aggRuleIds.add(avgRuleId); + String minRuleId = createRule(randomAggregationRule("min", " > 1")); + aggRuleIds.add(minRuleId); + + List detectorRules = aggRuleIds.stream().map(DetectorRule::new).collect(Collectors.toList()); + List prepackagedDocRules = getRandomPrePackagedRules(); + + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, + prepackagedDocRules.stream().map(DetectorRule::new).collect(Collectors.toList())); + Detector detector = randomDetectorWithInputs(List.of(input)); + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + String detectorId = responseBody.get("_id").toString(); + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + List inputArr = detectorMap.get("inputs"); + + assertEquals(2, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); + + String maxRuleId = createRule(randomAggregationRule("max", " > 2")); + DetectorInput newInput = new DetectorInput("windows detector for security analytics", List.of("windows"), + List.of(new DetectorRule(avgRuleId), new DetectorRule(maxRuleId)), + getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList())); + detector = randomDetectorWithInputs(List.of(newInput)); + createResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(detector)); + + assertEquals("Update detector failed", RestStatus.OK, restStatus(createResponse)); + + hits = executeSearch(Detector.DETECTORS_INDEX, request); + hit = hits.get(0); + Map updatedDetectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + inputArr = updatedDetectorMap.get("inputs"); + + assertEquals(2, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); + + List monitorIds = ((List) (updatedDetectorMap).get("monitor_id")); + + assertEquals(3, monitorIds.size()); + + indexDoc(index, "1", randomDoc(2, 4, "Info")); + indexDoc(index, "2", randomDoc(3, 4, "Info")); + indexDoc(index, "3", randomDoc(3, 4, "Test")); + Map numberOfMonitorTypes = new HashMap<>(); + for(String monitorId: monitorIds) { + Map monitor = (Map)(entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId)))).get("monitor"); + numberOfMonitorTypes.merge(monitor.get("monitor_type"), 1, Integer::sum); + executeAlertingMonitor(monitorId, Collections.emptyMap()); + } + + assertEquals(2, numberOfMonitorTypes.get(MonitorType.BUCKET_LEVEL_MONITOR.getValue()).intValue()); + assertEquals(1, numberOfMonitorTypes.get(MonitorType.DOC_LEVEL_MONITOR.getValue()).intValue()); + // Verify findings + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + + assertNotNull(getFindingsBody); + assertEquals(5, getFindingsBody.get("total_findings")); + + String findingDetectorId = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); + assertEquals(detectorId, findingDetectorId); + + String findingIndex = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("index").toString(); + assertEquals(index, findingIndex); + + List docLevelFinding = new ArrayList<>(); + List> findings = (List)getFindingsBody.get("findings"); + + Set docLevelRules = new HashSet<>(prepackagedDocRules); + + for(Map finding : findings) { + List> queries = (List>)finding.get("queries"); + Set findingRules = queries.stream().map(it -> it.get("id").toString()).collect(Collectors.toSet()); + // In this test case all doc level rules are matching the finding rule ids + if(docLevelRules.containsAll(findingRules)) { + docLevelFinding.addAll((List)finding.get("related_doc_ids")); + } else { + String aggRuleId = findingRules.iterator().next(); + + List findingDocs = (List)finding.get("related_doc_ids"); + Assert.assertEquals(2, findingDocs.size()); + assertTrue(Arrays.asList("1", "2").containsAll(findingDocs)); + } + } + // Verify doc level finding + assertTrue(Arrays.asList("1", "2", "3").containsAll(docLevelFinding)); + } + + public void testMinAggregationRule_findingSuccess() throws IOException { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + List aggRuleIds = new ArrayList<>(); + String testOpCode = "Test"; + aggRuleIds.add(createRule(randomAggregationRule("min", " > 3", testOpCode))); + List detectorRules = aggRuleIds.stream().map(DetectorRule::new).collect(Collectors.toList()); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, + Collections.emptyList()); + Detector detector = randomDetectorWithInputs(List.of(input)); + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + String detectorId = responseBody.get("_id").toString(); + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + + List monitorIds = ((List) (detectorMap).get("monitor_id")); + + indexDoc(index, "4", randomDoc(5, 3, testOpCode)); + indexDoc(index, "5", randomDoc(2, 3, testOpCode)); + indexDoc(index, "6", randomDoc(4, 3, testOpCode)); + indexDoc(index, "7", randomDoc(6, 2, testOpCode)); + indexDoc(index, "8", randomDoc(1, 1, testOpCode)); + + Map numberOfMonitorTypes = new HashMap<>(); + for (String monitorId: monitorIds) { + Map monitor = (Map)(entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId)))).get("monitor"); + numberOfMonitorTypes.merge(monitor.get("monitor_type"), 1, Integer::sum); + executeAlertingMonitor(monitorId, Collections.emptyMap()); + } + + // Verify findings + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + + assertNotNull(getFindingsBody); + + List> findings = (List)getFindingsBody.get("findings"); + for (Map finding : findings) { + List findingDocs = (List)finding.get("related_doc_ids"); + Assert.assertEquals(1, findingDocs.size()); + assertTrue(Arrays.asList("7").containsAll(findingDocs)); + } + + String findingDetectorId = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); + assertEquals(detectorId, findingDetectorId); + + String findingIndex = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("index").toString(); + assertEquals(index, findingIndex); + } + + + /** + * 1. Creates detector with aggregation and prepackaged rules + * (sum rule - should match docIds: 1, 2, 3; maxRule - 4, 5, 6, 7; minRule - 7) + * 2. Verifies monitor execution + * 3. Verifies findings + * + * @throws IOException + */ + public void testMultipleAggregationAndDocRules_findingSuccess() throws IOException { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + String infoOpCode = "Info"; + String testOpCode = "Test"; + + // 5 custom aggregation rules + String sumRuleId = createRule(randomAggregationRule("sum", " > 1", infoOpCode)); + String maxRuleId = createRule(randomAggregationRule("max", " > 3", testOpCode)); + String minRuleId = createRule(randomAggregationRule("min", " > 3", testOpCode)); + String avgRuleId = createRule(randomAggregationRule("avg", " > 3", infoOpCode)); + String cntRuleId = createRule(randomAggregationRule("count", " > 3", "randomTestCode")); + List aggRuleIds = List.of(sumRuleId, maxRuleId); + String randomDocRuleId = createRule(randomRule()); + List prepackagedRules = getRandomPrePackagedRules(); + + List detectorRules = List.of(new DetectorRule(sumRuleId), new DetectorRule(maxRuleId), new DetectorRule(minRuleId), + new DetectorRule(avgRuleId), new DetectorRule(cntRuleId), new DetectorRule(randomDocRuleId)); + + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), detectorRules, + prepackagedRules.stream().map(DetectorRule::new).collect(Collectors.toList())); + Detector detector = randomDetectorWithInputs(List.of(input)); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + + assertEquals(6, response.getHits().getTotalHits().value); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + Map responseBody = asMap(createResponse); + String detectorId = responseBody.get("_id").toString(); + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map updatedDetectorMap = (HashMap)(hit.getSourceAsMap().get("detector")); + List inputArr = updatedDetectorMap.get("inputs"); + + assertEquals(6, ((Map>) inputArr.get(0)).get("detector_input").get("custom_rules").size()); + + List monitorIds = ((List) (updatedDetectorMap).get("monitor_id")); + + assertEquals(6, monitorIds.size()); + + indexDoc(index, "1", randomDoc(2, 4, infoOpCode)); + indexDoc(index, "2", randomDoc(3, 4, infoOpCode)); + indexDoc(index, "3", randomDoc(1, 4, infoOpCode)); + indexDoc(index, "4", randomDoc(5, 3, testOpCode)); + indexDoc(index, "5", randomDoc(2, 3, testOpCode)); + indexDoc(index, "6", randomDoc(4, 3, testOpCode)); + indexDoc(index, "7", randomDoc(6, 2, testOpCode)); + indexDoc(index, "8", randomDoc(1, 1, testOpCode)); + + Map numberOfMonitorTypes = new HashMap<>(); + + for (String monitorId: monitorIds) { + Map monitor = (Map)(entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId)))).get("monitor"); + numberOfMonitorTypes.merge(monitor.get("monitor_type"), 1, Integer::sum); + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + + // Assert monitor executions + Map executeResults = entityAsMap(executeResponse); + if (MonitorType.DOC_LEVEL_MONITOR.getValue().equals(monitor.get("monitor_type"))) { + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + // 5 prepackaged and 1 custom doc level rule + assertEquals(6, noOfSigmaRuleMatches); + } else { + for(String ruleId: aggRuleIds) { + Object rule = (((Map)((Map)((List)((Map)executeResults.get("input_results")).get("results")).get(0)).get("aggregations")).get(ruleId)); + if(rule != null) { + if(ruleId == sumRuleId) { + assertRuleMonitorFinding(executeResults, ruleId,3, List.of("4")); + } else if (ruleId == maxRuleId) { + assertRuleMonitorFinding(executeResults, ruleId,5, List.of("2", "3")); + } + else if (ruleId == minRuleId) { + assertRuleMonitorFinding(executeResults, ruleId,1, List.of("2")); + } + } + } + } + } + + assertEquals(5, numberOfMonitorTypes.get(MonitorType.BUCKET_LEVEL_MONITOR.getValue()).intValue()); + assertEquals(1, numberOfMonitorTypes.get(MonitorType.DOC_LEVEL_MONITOR.getValue()).intValue()); + + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + + // Assert findings + assertNotNull(getFindingsBody); + // 8 findings from doc level rules, and 3 findings for aggregation (sum, max and min) + assertEquals(11, getFindingsBody.get("total_findings")); + + String findingDetectorId = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("detectorId").toString(); + assertEquals(detectorId, findingDetectorId); + + String findingIndex = ((Map)((List)getFindingsBody.get("findings")).get(0)).get("index").toString(); + assertEquals(index, findingIndex); + + List docLevelFinding = new ArrayList<>(); + List> findings = (List) getFindingsBody.get("findings"); + + Set docLevelRules = new HashSet<>(prepackagedRules); + docLevelRules.add(randomDocRuleId); + + for(Map finding : findings) { + List> queries = (List>)finding.get("queries"); + Set findingRuleIds = queries.stream().map(it -> it.get("id").toString()).collect(Collectors.toSet()); + // Doc level finding matches all doc level rules (including the custom one) in this test case + if(docLevelRules.containsAll(findingRuleIds)) { + docLevelFinding.addAll((List)finding.get("related_doc_ids")); + } else { + // In the case of bucket level monitors, queries will always contain one value + String aggRuleId = findingRuleIds.iterator().next(); + List findingDocs = (List)finding.get("related_doc_ids"); + + if(aggRuleId.equals(sumRuleId)) { + assertTrue(List.of("1", "2", "3").containsAll(findingDocs)); + } else if(aggRuleId.equals(maxRuleId)) { + assertTrue(List.of("4", "5", "6", "7").containsAll(findingDocs)); + } else if(aggRuleId.equals( minRuleId)) { + assertTrue(List.of("7").containsAll(findingDocs)); + } + } + } + + assertTrue(Arrays.asList("1", "2", "3", "4", "5", "6", "7", "8").containsAll(docLevelFinding)); + } + + public void testCreateDetectorWithKeywordsRule_verifyFindings_success() throws IOException { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + // Create random doc rule + String randomDocRuleId = createRule(randomRuleWithKeywords()); + List prepackagedRules = getRandomPrePackagedRules(); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(randomDocRuleId)), + prepackagedRules.stream().map(DetectorRule::new).collect(Collectors.toList())); + Detector detector = randomDetectorWithInputs(List.of(input)); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map updateResponseBody = asMap(createResponse); + String detectorId = updateResponseBody.get("_id").toString(); + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + + // Verify newly created doc level monitor + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorAsMap = (Map) hit.getSourceAsMap().get("detector"); + List monitorIds = ((List) (detectorAsMap).get("monitor_id")); + + assertEquals(1, monitorIds.size()); + + String monitorId = monitorIds.get(0); + String monitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId))).get("monitor")).get("monitor_type"); + + assertEquals(MonitorType.DOC_LEVEL_MONITOR.getValue(), monitorType); + + // Verify rules + request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + + assertEquals(6, response.getHits().getTotalHits().value); + + // Verify findings + indexDoc(index, "1", randomDoc(2, 5, "Test")); + indexDoc(index, "2", randomDoc(3, 5, "Test")); + + + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + // Verify 5 prepackaged rules and 1 custom rule + assertEquals(6, noOfSigmaRuleMatches); + + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + + assertNotNull(getFindingsBody); + // When doc level monitor is being applied one finding is generated per document + assertEquals(2, getFindingsBody.get("total_findings")); + + Set docRuleIds = new HashSet<>(prepackagedRules); + docRuleIds.add(randomDocRuleId); + + List> findings = (List) getFindingsBody.get("findings"); + List foundDocIds = new ArrayList<>(); + for (Map finding : findings) { + Set aggRulesFinding = ((List>) finding.get("queries")).stream().map(it -> it.get("id").toString()).collect( + Collectors.toSet()); + + assertTrue(docRuleIds.containsAll(aggRulesFinding)); + + List findingDocs = (List) finding.get("related_doc_ids"); + Assert.assertEquals(1, findingDocs.size()); + foundDocIds.addAll(findingDocs); + } + assertTrue(Arrays.asList("1", "2").containsAll(foundDocIds)); + } + + public void testCreateDetectorWithKeywordsRule_ensureNoFindingsWithoutTextMapping_success() throws IOException { + String index = createTestIndex(randomIndex(), windowsIndexMappingOnlyNumericAndDate()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + // Create random doc rule + String randomDocRuleId = createRule(randomRuleWithStringKeywords()); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(randomDocRuleId)), + Collections.emptyList()); + Detector detector = randomDetectorWithInputs(List.of(input)); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map updateResponseBody = asMap(createResponse); + String detectorId = updateResponseBody.get("_id").toString(); + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + + // Verify newly created doc level monitor + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorAsMap = (Map) hit.getSourceAsMap().get("detector"); + List monitorIds = ((List) (detectorAsMap).get("monitor_id")); + + assertEquals(1, monitorIds.size()); + + String monitorId = monitorIds.get(0); + String monitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId))).get("monitor")).get("monitor_type"); + + assertEquals(MonitorType.DOC_LEVEL_MONITOR.getValue(), monitorType); + + // Verify rules created + request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + + assertEquals(1, response.getHits().getTotalHits().value); + + // Insert test document + indexDoc(index, "1", randomDocOnlyNumericAndDate(2, 5, "Test")); + indexDoc(index, "2", randomDocOnlyNumericAndDate(3, 5, "Test")); + + + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + // Verify no rules match test document + assertEquals(0, noOfSigmaRuleMatches); + } + + public void testCreateDetectorWithKeywordsRule_ensureNoFindingsWithoutDateMapping_success() throws IOException { + String index = createTestIndex(randomIndex(), windowsIndexMappingOnlyNumericAndText()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + // Create random doc rule + String randomDocRuleId = createRule(randomRuleWithDateKeywords()); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(randomDocRuleId)), + Collections.emptyList()); + Detector detector = randomDetectorWithInputs(List.of(input)); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map updateResponseBody = asMap(createResponse); + String detectorId = updateResponseBody.get("_id").toString(); + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + + // Verify newly created doc level monitor + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + Map detectorAsMap = (Map) hit.getSourceAsMap().get("detector"); + List monitorIds = ((List) (detectorAsMap).get("monitor_id")); + + assertEquals(1, monitorIds.size()); + + String monitorId = monitorIds.get(0); + String monitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + monitorId))).get("monitor")).get("monitor_type"); + + assertEquals(MonitorType.DOC_LEVEL_MONITOR.getValue(), monitorType); + + // Verify rules created + request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + + assertEquals(1, response.getHits().getTotalHits().value); + + // Insert test document + indexDoc(index, "1", randomDocOnlyNumericAndText(2, 5, "Test")); + indexDoc(index, "2", randomDocOnlyNumericAndText(3, 5, "Test")); + + + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + // Verify no rules match test document + assertEquals(0, noOfSigmaRuleMatches); + } + + private static void assertRuleMonitorFinding(Map executeResults, String ruleId, int expectedDocCount, List expectedTriggerResult) { + List> buckets = ((List>)(((Map)((Map)((Map)((List)((Map) executeResults.get("input_results")).get("results")).get(0)).get("aggregations")).get("result_agg")).get("buckets"))); + Integer docCount = buckets.stream().mapToInt(it -> (Integer)it.get("doc_count")).sum(); + assertEquals(expectedDocCount, docCount.intValue()); + + List triggerResultBucketKeys = ((Map)((Map) ((Map)executeResults.get("trigger_results")).get(ruleId)).get("agg_result_buckets")).keySet().stream().collect(Collectors.toList()); + assertEquals(expectedTriggerResult, triggerResultBucketKeys); + } +} diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorRestApiIT.java index 9b58ae084..c2486b71c 100644 --- a/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/DetectorRestApiIT.java @@ -4,6 +4,9 @@ */ package org.opensearch.securityanalytics.resthandler; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; import org.apache.http.HttpEntity; import org.apache.http.HttpStatus; import org.apache.http.entity.ContentType; @@ -14,6 +17,11 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Request; import org.opensearch.client.Response; +import org.opensearch.common.Strings; +import org.opensearch.common.settings.Settings; +import org.opensearch.client.ResponseException; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.commons.alerting.model.Monitor.MonitorType; import org.opensearch.rest.RestStatus; import org.opensearch.search.SearchHit; import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; @@ -29,17 +37,77 @@ import java.util.Locale; import java.util.Map; import java.util.stream.Collectors; +import org.opensearch.securityanalytics.model.DetectorTrigger; -import static org.opensearch.securityanalytics.TestHelpers.randomDetector; -import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputs; -import static org.opensearch.securityanalytics.TestHelpers.randomDoc; -import static org.opensearch.securityanalytics.TestHelpers.randomIndex; -import static org.opensearch.securityanalytics.TestHelpers.randomRule; -import static org.opensearch.securityanalytics.TestHelpers.windowsIndexMapping; +import static org.opensearch.securityanalytics.TestHelpers.*; public class DetectorRestApiIT extends SecurityAnalyticsRestTestCase { + public void testNewLogTypes() throws IOException { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of("github"), List.of(), List.of(), List.of(), List.of()))); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + } + @SuppressWarnings("unchecked") + public void testDeletingADetector_MonitorNotExists() throws IOException { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + // Create detector #1 of type test_windows + Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + String detectorId1 = createDetector(detector1); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId1 + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + String monitorId = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + + Response deleteMonitorResponse = deleteAlertingMonitor(monitorId); + assertEquals(200, deleteMonitorResponse.getStatusLine().getStatusCode()); + entityAsMap(deleteMonitorResponse); + + Response deleteResponse = makeRequest(client(), "DELETE", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId1, Collections.emptyMap(), null); + Assert.assertEquals("Delete detector failed", RestStatus.OK, restStatus(deleteResponse)); + hits = executeSearch(Detector.DETECTORS_INDEX, request); + Assert.assertEquals(0, hits.size()); + } + + @SuppressWarnings("unchecked") public void testCreatingADetector() throws IOException { String index = createTestIndex(randomIndex(), windowsIndexMapping()); @@ -48,7 +116,7 @@ public void testCreatingADetector() throws IOException { // both req params and req body are supported createMappingRequest.setJsonEntity( "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"windows\", " + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + " \"partial\":true" + "}" ); @@ -56,7 +124,7 @@ public void testCreatingADetector() throws IOException { Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); - Detector detector = randomDetector(getRandomPrePackagedRules()); + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -72,6 +140,9 @@ public void testCreatingADetector() throws IOException { Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("findings_index")); Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("alert_index")); + String detectorTypeInResponse = (String) ((Map)responseBody.get("detector")).get("detector_type"); + Assert.assertEquals("Detector type incorrect", randomDetectorType().toLowerCase(Locale.ROOT), detectorTypeInResponse); + String request = "{\n" + " \"query\" : {\n" + " \"match\":{\n" + @@ -93,6 +164,189 @@ public void testCreatingADetector() throws IOException { Assert.assertEquals(5, noOfSigmaRuleMatches); } + @SuppressWarnings("unchecked") + public void test_searchDetectors_detectorsIndexNotExists() throws IOException { + try { + makeRequest(client(), "DELETE", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + "d1", Collections.emptyMap(), null); + fail("delete detector call should have failed"); + } catch (IOException e) { + assertTrue(e.getMessage().contains("not found")); + } + String request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + HttpEntity requestEntity = new StringEntity(request, ContentType.APPLICATION_JSON); + Response searchResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + "_search", Collections.emptyMap(), requestEntity); + Map searchResponseBody = asMap(searchResponse); + Assert.assertNotNull("response is not null", searchResponseBody); + Map searchResponseHits = (Map) searchResponseBody.get("hits"); + Map searchResponseTotal = (Map) searchResponseHits.get("total"); + Assert.assertEquals(0, searchResponseTotal.get("value")); + } + + + public void testCreatingADetectorWithMultipleIndices() throws IOException { + String index1 = createTestIndex("windows-1", windowsIndexMapping()); + String index2 = createTestIndex("windows-2", windowsIndexMapping()); + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"windows*\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + Detector detector = randomDetectorWithTriggers( + getRandomPrePackagedRules(), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of())), + List.of(index1, index2) + ); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String createdId = responseBody.get("_id").toString(); + int createdVersion = Integer.parseInt(responseBody.get("_version").toString()); + Assert.assertNotEquals("response is missing Id", Detector.NO_ID, createdId); + Assert.assertTrue("incorrect version", createdVersion > 0); + Assert.assertEquals("Incorrect Location header", String.format(Locale.getDefault(), "%s/%s", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, createdId), createResponse.getHeader("Location")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("rule_topic_index")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("findings_index")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("alert_index")); + + String detectorTypeInResponse = (String) ((Map)responseBody.get("detector")).get("detector_type"); + Assert.assertEquals("Detector type incorrect", randomDetectorType().toLowerCase(Locale.ROOT), detectorTypeInResponse); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + createdId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + String monitorId = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + + indexDoc(index1, "1", randomDoc()); + indexDoc(index2, "1", randomDoc()); + + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); + List> results = ((List>) ((Map) executeResults.get("input_results")).get("results")); + List matchedDocs = (List) (results.get(0)).values().iterator().next(); + assertTrue(matchedDocs.get(0).equals("1|windows-1")); + assertTrue(matchedDocs.get(1).equals("1|windows-2")); + + // Check findings + Map params = new HashMap<>(); + params.put("detector_id", createdId); + Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + assertNotNull(getFindingsBody); + Assert.assertEquals(2, getFindingsBody.get("total_findings")); + List findings = (List) getFindingsBody.get("findings"); + Assert.assertEquals(findings.size(), 2); + } + + public void testCreatingADetectorWithIndexNotExists() throws IOException { + Detector detector = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + + try { + makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + } catch (ResponseException ex) { + Assert.assertEquals(404, ex.getResponse().getStatusLine().getStatusCode()); + } + } + + public void testCreatingADetectorWithNonExistingCustomRule() throws IOException { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(java.util.UUID.randomUUID().toString())), + getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList())); + Detector detector = randomDetectorWithInputs(List.of(input)); + + try { + makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + } catch (ResponseException ex) { + Assert.assertEquals(404, ex.getResponse().getStatusLine().getStatusCode()); + } + } + + /** + * 1. Creates detector with no rules + * 2. Detector without rules and monitors created successfully + * @throws IOException + */ + public void testCreateDetectorWithoutRules() throws IOException { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response createMappingResponse = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, createMappingResponse.getStatusLine().getStatusCode()); + + Detector detector = randomDetector(Collections.emptyList()); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + // Verify rules + String request = "{\n" + + " \"query\" : {\n" + + " \"match_all\":{\n" + + " }\n" + + " }\n" + + "}"; + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()) + "*", request, true); + Assert.assertEquals(0, response.getHits().getTotalHits().value); + + String createdId = responseBody.get("_id").toString(); + int createdVersion = Integer.parseInt(responseBody.get("_version").toString()); + Assert.assertNotEquals("response is missing Id", Detector.NO_ID, createdId); + Assert.assertTrue("incorrect version", createdVersion > 0); + Assert.assertEquals("Incorrect Location header", String.format(Locale.getDefault(), "%s/%s", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, createdId), createResponse.getHeader("Location")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("rule_topic_index")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("findings_index")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("alert_index")); + } + public void testGettingADetector() throws IOException { String index = createTestIndex(randomIndex(), windowsIndexMapping()); @@ -101,7 +355,7 @@ public void testGettingADetector() throws IOException { // both req params and req body are supported createMappingRequest.setJsonEntity( "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"windows\", " + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + " \"partial\":true" + "}" ); @@ -121,6 +375,9 @@ public void testGettingADetector() throws IOException { Map responseBody = asMap(getResponse); Assert.assertEquals(createdId, responseBody.get("_id")); Assert.assertNotNull(responseBody.get("detector")); + + String detectorTypeInResponse = (String) ((Map)responseBody.get("detector")).get("detector_type"); + Assert.assertEquals("Detector type incorrect", randomDetectorType().toLowerCase(Locale.ROOT), detectorTypeInResponse); } @SuppressWarnings("unchecked") @@ -132,7 +389,7 @@ public void testSearchingDetectors() throws IOException { // both req params and req body are supported createMappingRequest.setJsonEntity( "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"windows\", " + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + " \"partial\":true" + "}" ); @@ -157,8 +414,13 @@ public void testSearchingDetectors() throws IOException { Map searchResponseHits = (Map) searchResponseBody.get("hits"); Map searchResponseTotal = (Map) searchResponseHits.get("total"); Assert.assertEquals(1, searchResponseTotal.get("value")); + + List> hits = ((List>) ((Map) searchResponseBody.get("hits")).get("hits")); + Map hit = hits.get(0); + String detectorTypeInResponse = (String) ((Map) hit.get("_source")).get("detector_type"); + Assert.assertEquals("Detector type incorrect", detectorTypeInResponse, randomDetectorType().toLowerCase(Locale.ROOT)); } - + @SuppressWarnings("unchecked") public void testCreatingADetectorWithCustomRules() throws IOException { String index = createTestIndex(randomIndex(), windowsIndexMapping()); @@ -168,7 +430,7 @@ public void testCreatingADetectorWithCustomRules() throws IOException { // both req params and req body are supported createMappingRequest.setJsonEntity( "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"windows\", " + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + " \"partial\":true" + "}" ); @@ -178,7 +440,7 @@ public void testCreatingADetectorWithCustomRules() throws IOException { String rule = randomRule(); - Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", "windows"), + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", randomDetectorType()), new StringEntity(rule), new BasicHeader("Content-Type", "application/json")); Assert.assertEquals("Create rule failed", RestStatus.CREATED, restStatus(createResponse)); @@ -213,6 +475,9 @@ public void testCreatingADetectorWithCustomRules() throws IOException { List hits = executeSearch(Detector.DETECTORS_INDEX, request); SearchHit hit = hits.get(0); + String detectorType = (String) ((Map) hit.getSourceAsMap().get("detector")).get("detector_type"); + Assert.assertEquals("Detector type incorrect", detectorType, randomDetectorType().toLowerCase(Locale.ROOT)); + String monitorId = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); indexDoc(index, "1", randomDoc()); @@ -224,6 +489,100 @@ public void testCreatingADetectorWithCustomRules() throws IOException { Assert.assertEquals(6, noOfSigmaRuleMatches); } + public void testCreatingADetectorWithAggregationRules() throws IOException { + String index = createTestIndex(randomIndex(), productIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"test_windows\", " + + " \"partial\":true" + + "}" + ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + String customAvgRuleId = createRule(productIndexAvgAggRule()); + + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(customAvgRuleId)), + getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList())); + Detector detector = randomDetectorWithInputs(List.of(input)); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String detectorId = responseBody.get("_id").toString(); + int createdVersion = Integer.parseInt(responseBody.get("_version").toString()); + Assert.assertNotEquals("response is missing Id", Detector.NO_ID, detectorId); + Assert.assertTrue("incorrect version", createdVersion > 0); + Assert.assertEquals("Incorrect Location header", String.format(Locale.getDefault(), "%s/%s", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, detectorId), createResponse.getHeader("Location")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("rule_topic_index")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("findings_index")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("alert_index")); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + List monitorTypes = new ArrayList<>(); + + Map detectorAsMap = (Map) hit.getSourceAsMap().get("detector"); + + String bucketLevelMonitorId = ""; + + // Verify that doc level monitor is created + List monitorIds = (List) (detectorAsMap).get("monitor_id"); + + String firstMonitorId = monitorIds.get(0); + String firstMonitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + firstMonitorId))).get("monitor")).get("monitor_type"); + + if(MonitorType.BUCKET_LEVEL_MONITOR.getValue().equals(firstMonitorType)){ + bucketLevelMonitorId = firstMonitorId; + } + monitorTypes.add(firstMonitorType); + + String secondMonitorId = monitorIds.get(1); + String secondMonitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + secondMonitorId))).get("monitor")).get("monitor_type"); + monitorTypes.add(secondMonitorType); + if(MonitorType.BUCKET_LEVEL_MONITOR.getValue().equals(secondMonitorType)){ + bucketLevelMonitorId = secondMonitorId; + } + Assert.assertTrue(Arrays.asList(MonitorType.BUCKET_LEVEL_MONITOR.getValue(), MonitorType.DOC_LEVEL_MONITOR.getValue()).containsAll(monitorTypes)); + + indexDoc(index, "1", randomProductDocument()); + + Response executeResponse = executeAlertingMonitor(bucketLevelMonitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + // verify bucket level monitor findings + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + assertNotNull(getFindingsBody); + Assert.assertEquals(1, getFindingsBody.get("total_findings")); + List findings = (List) getFindingsBody.get("findings"); + Assert.assertEquals(findings.size(), 1); + HashMap finding = (HashMap) findings.get(0); + Assert.assertTrue(finding.containsKey("queries")); + HashMap docLevelQuery = (HashMap) ((List) finding.get("queries")).get(0); + String ruleId = docLevelQuery.get("id").toString(); + // Verify if the rule id in bucket level finding is the same as rule used for bucket monitor creation + assertEquals(customAvgRuleId, ruleId); + Response getResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), null); + String getDetectorResponseString = new String(getResponse.getEntity().getContent().readAllBytes()); + Assert.assertTrue(getDetectorResponseString.contains(ruleId)); + } public void testUpdateADetector() throws IOException { String index = createTestIndex(randomIndex(), windowsIndexMapping()); @@ -232,7 +591,7 @@ public void testUpdateADetector() throws IOException { // both req params and req body are supported createMappingRequest.setJsonEntity( "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"windows\", " + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + " \"partial\":true" + "}" ); @@ -255,12 +614,12 @@ public void testUpdateADetector() throws IOException { " }\n" + " }\n" + "}"; - SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex("windows"), request, true); - Assert.assertEquals(1579, response.getHits().getTotalHits().value); + SearchResponse response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + Assert.assertEquals(5, response.getHits().getTotalHits().value); String rule = randomRule(); - createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", "windows"), + createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", randomDetectorType()), new StringEntity(rule), new BasicHeader("Content-Type", "application/json")); Assert.assertEquals("Create rule failed", RestStatus.CREATED, restStatus(createResponse)); @@ -275,18 +634,57 @@ public void testUpdateADetector() throws IOException { Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(updatedDetector)); Assert.assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); + String detectorTypeInResponse = (String) ((Map) (asMap(updateResponse).get("detector"))).get("detector_type"); + Assert.assertEquals("Detector type incorrect", randomDetectorType().toLowerCase(Locale.ROOT), detectorTypeInResponse); + request = "{\n" + " \"query\" : {\n" + " \"match_all\":{\n" + " }\n" + " }\n" + "}"; - response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex("windows"), request, true); - Assert.assertEquals(1580, response.getHits().getTotalHits().value); + response = executeSearchAndGetResponse(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request, true); + Assert.assertEquals(6, response.getHits().getTotalHits().value); + } + + public void testUpdateANonExistingDetector() throws IOException { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(), + getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList())); + Detector updatedDetector = randomDetectorWithInputs(List.of(input)); + + try { + makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + java.util.UUID.randomUUID(), Collections.emptyMap(), toHttpEntity(updatedDetector)); + } catch (ResponseException ex) { + Assert.assertEquals(404, ex.getResponse().getStatusLine().getStatusCode()); + } + } + + public void testUpdateADetectorWithIndexNotExists() throws IOException { + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(), + getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList())); + Detector updatedDetector = randomDetectorWithInputs(List.of(input)); + + try { + makeRequest(client(), "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + java.util.UUID.randomUUID(), Collections.emptyMap(), toHttpEntity(updatedDetector)); + } catch (ResponseException ex) { + Assert.assertEquals(404, ex.getResponse().getStatusLine().getStatusCode()); + } } @SuppressWarnings("unchecked") - public void testDeletingADetector() throws IOException { + public void testDeletingADetector_single_ruleTopicIndex() throws IOException { String index = createTestIndex(randomIndex(), windowsIndexMapping()); // Execute CreateMappingsAction to add alias mapping for index @@ -294,15 +692,399 @@ public void testDeletingADetector() throws IOException { // both req params and req body are supported createMappingRequest.setJsonEntity( "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"windows\", " + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + " \"partial\":true" + "}" ); Response response = client().performRequest(createMappingRequest); assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + // Create detector #1 of type test_windows + Detector detector1 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + String detectorId1 = createDetector(detector1); - Detector detector = randomDetector(getRandomPrePackagedRules()); + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId1 + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + String monitorId = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + + indexDoc(index, "1", randomDoc()); + + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); + // Create detector #2 of type windows + Detector detector2 = randomDetectorWithTriggers(getRandomPrePackagedRules(), List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of()))); + String detectorId2 = createDetector(detector2); + + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId2 + "\"\n" + + " }\n" + + " }\n" + + "}"; + hits = executeSearch(Detector.DETECTORS_INDEX, request); + hit = hits.get(0); + + monitorId = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + + indexDoc(index, "2", randomDoc()); + + executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + executeResults = entityAsMap(executeResponse); + noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); + + Response deleteResponse = makeRequest(client(), "DELETE", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId1, Collections.emptyMap(), null); + Assert.assertEquals("Delete detector failed", RestStatus.OK, restStatus(deleteResponse)); + // We deleted 1 detector, but 1 detector with same type exists, so we expect queryIndex to be present + Assert.assertTrue(doesIndexExist(String.format(Locale.ROOT, ".opensearch-sap-%s-detectors-queries-000001", "test_windows"))); + + deleteResponse = makeRequest(client(), "DELETE", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId2, Collections.emptyMap(), null); + Assert.assertEquals("Delete detector failed", RestStatus.OK, restStatus(deleteResponse)); + // We deleted all detectors of type windows, so we expect that queryIndex is deleted + Assert.assertFalse(doesIndexExist(String.format(Locale.ROOT, ".opensearch-sap-%s-detectors-queries-000001", "test_windows"))); + + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId1 + "\"\n" + + " }\n" + + " }\n" + + "}"; + hits = executeSearch(Detector.DETECTORS_INDEX, request); + Assert.assertEquals(0, hits.size()); + + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId2 + "\"\n" + + " }\n" + + " }\n" + + "}"; + hits = executeSearch(Detector.DETECTORS_INDEX, request); + Assert.assertEquals(0, hits.size()); + } + + public void testDeletingADetector_oneDetectorType_multiple_ruleTopicIndex() throws IOException { + String index1 = "test_index_1"; + createIndex(index1, Settings.EMPTY); + String index2 = "test_index_2"; + createIndex(index2, Settings.EMPTY); + // Insert doc with 900 fields to update mappings too + String doc = createDocumentWithNFields(900); + indexDoc(index1, "1", doc); + indexDoc(index2, "1", doc); + + // Create detector #1 of type test_windows + Detector detector1 = randomDetectorWithTriggers( + getRandomPrePackagedRules(), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of())), + List.of(index1) + ); + String detectorId1 = createDetector(detector1); + + // Create detector #2 of type test_windows + Detector detector2 = randomDetectorWithTriggers( + getRandomPrePackagedRules(), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(randomDetectorType()), List.of(), List.of(), List.of(), List.of())), + List.of(index2) + ); + + String detectorId2 = createDetector(detector2); + + Assert.assertTrue(doesIndexExist(".opensearch-sap-test_windows-detectors-queries-000001")); + Assert.assertTrue(doesIndexExist(".opensearch-sap-test_windows-detectors-queries-000002")); + + // Check if both query indices have proper settings applied from index template + Map settings = getIndexSettingsAsMap(".opensearch-sap-test_windows-detectors-queries-000001"); + assertTrue(settings.containsKey("index.analysis.char_filter.rule_ws_filter.pattern")); + assertTrue(settings.containsKey("index.hidden")); + settings = getIndexSettingsAsMap(".opensearch-sap-test_windows-detectors-queries-000002"); + assertTrue(settings.containsKey("index.analysis.char_filter.rule_ws_filter.pattern")); + assertTrue(settings.containsKey("index.hidden")); + + Response deleteResponse = makeRequest(client(), "DELETE", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId1, Collections.emptyMap(), null); + Assert.assertEquals("Delete detector failed", RestStatus.OK, restStatus(deleteResponse)); + // We deleted 1 detector, but 1 detector with same type exists, so we expect queryIndex to be present + Assert.assertFalse(doesIndexExist(String.format(Locale.getDefault(), ".opensearch-sap-%s-detectors-queries-000001", "test_windows"))); + Assert.assertTrue(doesIndexExist(String.format(Locale.getDefault(), ".opensearch-sap-%s-detectors-queries-000002", "test_windows"))); + + deleteResponse = makeRequest(client(), "DELETE", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId2, Collections.emptyMap(), null); + Assert.assertEquals("Delete detector failed", RestStatus.OK, restStatus(deleteResponse)); + // We deleted all detectors of type windows, so we expect that queryIndex is deleted + Assert.assertFalse(doesIndexExist(String.format(Locale.getDefault(), ".opensearch-sap-%s-detectors-queries-000001", "test_windows"))); + Assert.assertFalse(doesIndexExist(String.format(Locale.getDefault(), ".opensearch-sap-%s-detectors-queries-000002", "test_windows"))); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId1 + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + Assert.assertEquals(0, hits.size()); + + request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId2 + "\"\n" + + " }\n" + + " }\n" + + "}"; + hits = executeSearch(Detector.DETECTORS_INDEX, request); + Assert.assertEquals(0, hits.size()); + } + + public void testDeletingANonExistingDetector() throws IOException { + try { + makeRequest(client(), "DELETE", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + java.util.UUID.randomUUID(), Collections.emptyMap(), null); + } catch (ResponseException ex) { + Assert.assertEquals(404, ex.getResponse().getStatusLine().getStatusCode()); + } + } + + public void testCreatingADetectorWithTimestampFieldAliasMapping() throws IOException { + String index = createTestIndex(randomIndex(), productIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"test_windows\", " + + " \"partial\":true" + + "}" + ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + Request updateRequest = new Request("PUT", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + updateRequest.setJsonEntity(Strings.toString(XContentFactory.jsonBuilder().map(Map.of( + "index_name", index, + "field", "time", + "alias", "timestamp")))); + Response apiResponse = client().performRequest(updateRequest); + assertEquals(HttpStatus.SC_OK, apiResponse.getStatusLine().getStatusCode()); + + String customAvgRuleId = createRule(productIndexAvgAggRule()); + + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(customAvgRuleId)), + getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList())); + Detector detector = randomDetectorWithInputs(List.of(input)); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String detectorId = responseBody.get("_id").toString(); + int createdVersion = Integer.parseInt(responseBody.get("_version").toString()); + Assert.assertNotEquals("response is missing Id", Detector.NO_ID, detectorId); + Assert.assertTrue("incorrect version", createdVersion > 0); + Assert.assertEquals("Incorrect Location header", String.format(Locale.getDefault(), "%s/%s", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, detectorId), createResponse.getHeader("Location")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("rule_topic_index")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("findings_index")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("alert_index")); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + List monitorTypes = new ArrayList<>(); + + Map detectorAsMap = (Map) hit.getSourceAsMap().get("detector"); + + String bucketLevelMonitorId = ""; + + // Verify that doc level monitor is created + List monitorIds = (List) (detectorAsMap).get("monitor_id"); + + String firstMonitorId = monitorIds.get(0); + String firstMonitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + firstMonitorId))).get("monitor")).get("monitor_type"); + + if(MonitorType.BUCKET_LEVEL_MONITOR.getValue().equals(firstMonitorType)){ + bucketLevelMonitorId = firstMonitorId; + } + monitorTypes.add(firstMonitorType); + + String secondMonitorId = monitorIds.get(1); + String secondMonitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + secondMonitorId))).get("monitor")).get("monitor_type"); + monitorTypes.add(secondMonitorType); + if(MonitorType.BUCKET_LEVEL_MONITOR.getValue().equals(secondMonitorType)){ + bucketLevelMonitorId = secondMonitorId; + } + Assert.assertTrue(Arrays.asList(MonitorType.BUCKET_LEVEL_MONITOR.getValue(), MonitorType.DOC_LEVEL_MONITOR.getValue()).containsAll(monitorTypes)); + + indexDoc(index, "1", randomProductDocumentWithTime(System.currentTimeMillis())); + + Response executeResponse = executeAlertingMonitor(bucketLevelMonitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + // verify bucket level monitor findings + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + assertNotNull(getFindingsBody); + Assert.assertEquals(1, getFindingsBody.get("total_findings")); + List findings = (List) getFindingsBody.get("findings"); + Assert.assertEquals(findings.size(), 1); + HashMap finding = (HashMap) findings.get(0); + Assert.assertTrue(finding.containsKey("queries")); + HashMap docLevelQuery = (HashMap) ((List) finding.get("queries")).get(0); + String ruleId = docLevelQuery.get("id").toString(); + // Verify if the rule id in bucket level finding is the same as rule used for bucket monitor creation + assertEquals(customAvgRuleId, ruleId); + Response getResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), null); + String getDetectorResponseString = new String(getResponse.getEntity().getContent().readAllBytes()); + Assert.assertTrue(getDetectorResponseString.contains(ruleId)); + } + + public void testCreatingADetectorWithTimestampFieldAliasMapping_verifyTimeRangeInBucketMonitor() throws IOException { + String index = createTestIndex(randomIndex(), productIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"test_windows\", " + + " \"partial\":true" + + "}" + ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + Request updateRequest = new Request("PUT", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + updateRequest.setJsonEntity(Strings.toString(XContentFactory.jsonBuilder().map(Map.of( + "index_name", index, + "field", "time", + "alias", "timestamp")))); + Response apiResponse = client().performRequest(updateRequest); + assertEquals(HttpStatus.SC_OK, apiResponse.getStatusLine().getStatusCode()); + + String customAvgRuleId = createRule(productIndexAvgAggRule()); + + DetectorInput input = new DetectorInput("windows detector for security analytics", List.of("windows"), List.of(new DetectorRule(customAvgRuleId)), + getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList())); + Detector detector = randomDetectorWithInputs(List.of(input)); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String detectorId = responseBody.get("_id").toString(); + int createdVersion = Integer.parseInt(responseBody.get("_version").toString()); + Assert.assertNotEquals("response is missing Id", Detector.NO_ID, detectorId); + Assert.assertTrue("incorrect version", createdVersion > 0); + Assert.assertEquals("Incorrect Location header", String.format(Locale.getDefault(), "%s/%s", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, detectorId), createResponse.getHeader("Location")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("rule_topic_index")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("findings_index")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("alert_index")); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + detectorId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + List monitorTypes = new ArrayList<>(); + + Map detectorAsMap = (Map) hit.getSourceAsMap().get("detector"); + + String bucketLevelMonitorId = ""; + + // Verify that doc level monitor is created + List monitorIds = (List) (detectorAsMap).get("monitor_id"); + + String firstMonitorId = monitorIds.get(0); + String firstMonitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + firstMonitorId))).get("monitor")).get("monitor_type"); + + if(MonitorType.BUCKET_LEVEL_MONITOR.getValue().equals(firstMonitorType)){ + bucketLevelMonitorId = firstMonitorId; + } + monitorTypes.add(firstMonitorType); + + String secondMonitorId = monitorIds.get(1); + String secondMonitorType = ((Map) entityAsMap(client().performRequest(new Request("GET", "/_plugins/_alerting/monitors/" + secondMonitorId))).get("monitor")).get("monitor_type"); + monitorTypes.add(secondMonitorType); + if(MonitorType.BUCKET_LEVEL_MONITOR.getValue().equals(secondMonitorType)){ + bucketLevelMonitorId = secondMonitorId; + } + Assert.assertTrue(Arrays.asList(MonitorType.BUCKET_LEVEL_MONITOR.getValue(), MonitorType.DOC_LEVEL_MONITOR.getValue()).containsAll(monitorTypes)); + + indexDoc(index, "1", randomProductDocumentWithTime(System.currentTimeMillis()-1000*60*70)); // doc's timestamp is older than 1 hr + + Response executeResponse = executeAlertingMonitor(bucketLevelMonitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + // verify bucket level monitor findings + Map params = new HashMap<>(); + params.put("detector_id", detectorId); + Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + assertNotNull(getFindingsBody); + Assert.assertEquals(0, getFindingsBody.get("total_findings")); + List findings = (List) getFindingsBody.get("findings"); + Assert.assertEquals(findings.size(), 0); //there should be no findings as doc is not in time range of current run + } + + public void testDetector_withDatastream_withTemplateField_endToEnd_success() throws IOException { + String datastream = "test_datastream"; + + createSampleDatastream(datastream, windowsIndexMapping(), false); + // Execute CreateMappingsAction to add alias mapping for index + createMappingsAPI(datastream, randomDetectorType()); + + String writeIndex = getDatastreamWriteIndex(datastream); + + // Verify mappings + Map props = getIndexMappingsAPIFlat(writeIndex); + assertTrue(props.containsKey("windows-event_data-CommandLine")); + assertTrue(props.containsKey("event_uid")); + assertTrue(props.containsKey("windows-hostname")); + assertTrue(props.containsKey("windows-message")); + assertTrue(props.containsKey("windows-provider-name")); + assertTrue(props.containsKey("windows-servicename")); + + + // Get applied mappings + props = getIndexMappingsSAFlat(datastream); + assertEquals(6, props.size()); + assertTrue(props.containsKey("windows-event_data-CommandLine")); + assertTrue(props.containsKey("event_uid")); + assertTrue(props.containsKey("windows-hostname")); + assertTrue(props.containsKey("windows-message")); + assertTrue(props.containsKey("windows-provider-name")); + assertTrue(props.containsKey("windows-servicename")); + + // Create detector + Detector detector = randomDetectorWithInputsAndTriggers(List.of(new DetectorInput("windows detector for security analytics", List.of(datastream), List.of(), + getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList()))), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(), List.of(), List.of("attack.defense_evasion"), List.of()))); Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); @@ -310,6 +1092,16 @@ public void testDeletingADetector() throws IOException { Map responseBody = asMap(createResponse); String createdId = responseBody.get("_id").toString(); + int createdVersion = Integer.parseInt(responseBody.get("_version").toString()); + Assert.assertNotEquals("response is missing Id", Detector.NO_ID, createdId); + Assert.assertTrue("incorrect version", createdVersion > 0); + Assert.assertEquals("Incorrect Location header", String.format(Locale.getDefault(), "%s/%s", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, createdId), createResponse.getHeader("Location")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("rule_topic_index")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("findings_index")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("alert_index")); + + String detectorTypeInResponse = (String) ((Map)responseBody.get("detector")).get("detector_type"); + Assert.assertEquals("Detector type incorrect", randomDetectorType().toLowerCase(Locale.ROOT), detectorTypeInResponse); String request = "{\n" + " \"query\" : {\n" + @@ -323,15 +1115,125 @@ public void testDeletingADetector() throws IOException { String monitorId = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); - Response deleteResponse = makeRequest(client(), "DELETE", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + createdId, Collections.emptyMap(), null); - Assert.assertEquals("Delete detector failed", RestStatus.OK, restStatus(deleteResponse)); + indexDoc(datastream, "1", randomDoc()); - Assert.assertFalse(alertingMonitorExists(monitorId)); + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); - // todo: change to assertFalse when alerting bug is fixed. https://github.com/opensearch-project/alerting/issues/581 - Assert.assertTrue(doesIndexExist(String.format(Locale.getDefault(), ".opensearch-sap-detectors-queries-%s", "windows"))); + refreshAllIndices(); + + // Call GetAlerts API + Map params = new HashMap<>(); + params.put("detectorType", randomDetectorType()); + Response getAlertsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + Map getAlertsBody = asMap(getAlertsResponse); + // TODO enable asserts here when able + Assert.assertEquals(1, getAlertsBody.get("total_alerts")); + + // Call GetFindings API + params = new HashMap<>(); + params.put("detector_id", createdId); + Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + assertNotNull(getFindingsBody); + Assert.assertEquals(1, getFindingsBody.get("total_findings")); + List findings = (List) getFindingsBody.get("findings"); + Assert.assertEquals(findings.size(), 1); + + deleteDatastreamAPI(datastream); + } - hits = executeSearch(Detector.DETECTORS_INDEX, request); - Assert.assertEquals(0, hits.size()); + public void testDetector_withAlias_endToEnd_success() throws IOException { + String writeIndex = "my_windows_log-1"; + String indexAlias = "test_alias"; + + createIndex(writeIndex, Settings.EMPTY, windowsIndexMapping(), "\"" + indexAlias + "\":{}"); + // Execute CreateMappingsAction to add alias mapping for index + createMappingsAPI(indexAlias, randomDetectorType()); + + // Verify mappings + Map props = getIndexMappingsAPIFlat(writeIndex); + assertTrue(props.containsKey("windows-event_data-CommandLine")); + assertTrue(props.containsKey("event_uid")); + assertTrue(props.containsKey("windows-hostname")); + assertTrue(props.containsKey("windows-message")); + assertTrue(props.containsKey("windows-provider-name")); + assertTrue(props.containsKey("windows-servicename")); + + + // Get applied mappings + props = getIndexMappingsSAFlat(indexAlias); + assertEquals(6, props.size()); + assertTrue(props.containsKey("windows-event_data-CommandLine")); + assertTrue(props.containsKey("event_uid")); + assertTrue(props.containsKey("windows-hostname")); + assertTrue(props.containsKey("windows-message")); + assertTrue(props.containsKey("windows-provider-name")); + assertTrue(props.containsKey("windows-servicename")); + + // Create detector + Detector detector = randomDetectorWithInputsAndTriggers(List.of(new DetectorInput("windows detector for security analytics", List.of(indexAlias), List.of(), + getRandomPrePackagedRules().stream().map(DetectorRule::new).collect(Collectors.toList()))), + List.of(new DetectorTrigger(null, "test-trigger", "1", List.of(), List.of(), List.of(), List.of("attack.defense_evasion"), List.of()))); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String createdId = responseBody.get("_id").toString(); + int createdVersion = Integer.parseInt(responseBody.get("_version").toString()); + Assert.assertNotEquals("response is missing Id", Detector.NO_ID, createdId); + Assert.assertTrue("incorrect version", createdVersion > 0); + Assert.assertEquals("Incorrect Location header", String.format(Locale.getDefault(), "%s/%s", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, createdId), createResponse.getHeader("Location")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("rule_topic_index")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("findings_index")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("alert_index")); + + String detectorTypeInResponse = (String) ((Map)responseBody.get("detector")).get("detector_type"); + Assert.assertEquals("Detector type incorrect", randomDetectorType().toLowerCase(Locale.ROOT), detectorTypeInResponse); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + createdId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + String monitorId = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + + indexDoc(indexAlias, "1", randomDoc()); + + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); + + refreshAllIndices(); + + // Call GetAlerts API + Map params = new HashMap<>(); + params.put("detectorType", randomDetectorType()); + Response getAlertsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.ALERTS_BASE_URI, params, null); + Map getAlertsBody = asMap(getAlertsResponse); + // TODO enable asserts here when able + Assert.assertEquals(1, getAlertsBody.get("total_alerts")); + + // Call GetFindings API + params = new HashMap<>(); + params.put("detector_id", createdId); + Response getFindingsResponse = makeRequest(client(), "GET", SecurityAnalyticsPlugin.FINDINGS_BASE_URI + "/_search", params, null); + Map getFindingsBody = entityAsMap(getFindingsResponse); + assertNotNull(getFindingsBody); + Assert.assertEquals(1, getFindingsBody.get("total_findings")); + List findings = (List) getFindingsBody.get("findings"); + Assert.assertEquals(findings.size(), 1); } } \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/RuleRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/RuleRestApiIT.java index 74ed31f94..c025dafab 100644 --- a/src/test/java/org/opensearch/securityanalytics/resthandler/RuleRestApiIT.java +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/RuleRestApiIT.java @@ -11,8 +11,17 @@ import org.opensearch.client.Request; import org.opensearch.client.Response; import org.opensearch.client.ResponseException; +import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtAggregationBuilder; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.core.xcontent.XContentParser.Token; import org.opensearch.rest.RestStatus; import org.opensearch.search.SearchHit; +import org.opensearch.search.aggregations.AggregatorFactories; import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; import org.opensearch.securityanalytics.SecurityAnalyticsRestTestCase; import org.opensearch.securityanalytics.config.monitors.DetectorMonitorConfig; @@ -22,12 +31,17 @@ import org.opensearch.securityanalytics.model.Rule; import java.io.IOException; +import java.io.UnsupportedEncodingException; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.stream.Collectors; +import org.opensearch.securityanalytics.rules.backend.OSQueryBackend.AggregationQueries; +import org.opensearch.securityanalytics.rules.exceptions.SigmaError; +import static org.opensearch.securityanalytics.TestHelpers.randomDetectorType; +import static org.opensearch.securityanalytics.TestHelpers.countAggregationTestRule; import static org.opensearch.securityanalytics.TestHelpers.randomDetectorWithInputs; import static org.opensearch.securityanalytics.TestHelpers.randomDoc; import static org.opensearch.securityanalytics.TestHelpers.randomEditedRule; @@ -41,7 +55,7 @@ public class RuleRestApiIT extends SecurityAnalyticsRestTestCase { public void testCreatingARule() throws IOException { String rule = randomRule(); - Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", "windows"), + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", randomDetectorType()), new StringEntity(rule), new BasicHeader("Content-Type", "application/json")); Assert.assertEquals("Create rule failed", RestStatus.CREATED, restStatus(createResponse)); @@ -61,7 +75,7 @@ public void testCreatingARule() throws IOException { " \"query\": {\n" + " \"bool\": {\n" + " \"must\": [\n" + - " { \"match\": {\"rule.category\": \"windows\"}}\n" + + " { \"match\": {\"rule.category\": \"" + randomDetectorType().toLowerCase(Locale.ROOT) + "\"}}\n" + " ]\n" + " }\n" + " }\n" + @@ -89,12 +103,67 @@ public void testCreatingARule() throws IOException { Assert.assertEquals(0, hits.size()); } + public void testCreatingARule_incorrect_category() throws IOException { + String rule = randomRule(); + + try { + makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", "unknown_category"), + new StringEntity(rule), new BasicHeader("Content-Type", "application/json")); + fail("expected exception due to invalid category"); + } catch (ResponseException e) { + assertEquals(HttpStatus.SC_BAD_REQUEST, e.getResponse().getStatusLine().getStatusCode()); + Assert.assertTrue( + e.getMessage().contains("Invalid rule category") + ); + } + } + + public void testCreatingAggregationRule() throws SigmaError, IOException { + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", "windows"), + new StringEntity(countAggregationTestRule()), new BasicHeader("Content-Type", "application/json")); + Assert.assertEquals("Create rule failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String createdId = responseBody.get("_id").toString(); + int createdVersion = Integer.parseInt(responseBody.get("_version").toString()); + Assert.assertNotEquals("response is missing Id", Detector.NO_ID, createdId); + Assert.assertTrue("incorrect version", createdVersion > 0); + Assert.assertEquals("Incorrect Location header", String.format(Locale.getDefault(), "%s/%s", SecurityAnalyticsPlugin.RULE_BASE_URI, createdId), createResponse.getHeader("Location")); + + String index = Rule.CUSTOM_RULES_INDEX; + String request = "{\n" + + " \"query\": {\n" + + " \"nested\": {\n" + + " \"path\": \"rule\",\n" + + " \"query\": {\n" + + " \"bool\": {\n" + + " \"must\": [\n" + + " { \"match\": {\"rule.category\": \"windows\"}}\n" + + " ]\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + + List hits = executeSearch(index, request); + + XContentParser xcp = XContentFactory.xContent(XContentType.JSON) + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, hits.get(0).getSourceAsString()); + Rule result = Rule.docParse(xcp, null, null); + + Assert.assertEquals(1, result.getAggregationQueries().size()); + String expected = "{\"aggQuery\":\"{\\\"result_agg\\\":{\\\"terms\\\":{\\\"field\\\":\\\"_index\\\"}}}\",\"bucketTriggerQuery\":\"{\\\"buckets_path\\\":{\\\"_cnt\\\":\\\"_cnt\\\"},\\\"parent_bucket_path\\\":\\\"result_agg\\\",\\\"script\\\":{\\\"source\\\":\\\"params._cnt > 1.0\\\",\\\"lang\\\":\\\"painless\\\"}}\"}"; + Assert.assertEquals(expected, result.getAggregationQueries().get(0).getValue()); + } + @SuppressWarnings("unchecked") public void testCreatingARuleWithWrongSyntax() throws IOException { String rule = randomRuleWithErrors(); try { - makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", "windows"), + makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", randomDetectorType()), new StringEntity(rule), new BasicHeader("Content-Type", "application/json")); } catch (ResponseException ex) { Map responseBody = asMap(ex.getResponse()); @@ -112,7 +181,7 @@ public void testSearchingPrepackagedRules() throws IOException { " \"query\": {\n" + " \"bool\": {\n" + " \"must\": [\n" + - " { \"match\": {\"rule.category\": \"windows\"}}\n" + + " { \"match\": {\"rule.category\": \"" + randomDetectorType().toLowerCase(Locale.ROOT) + "\"}}\n" + " ]\n" + " }\n" + " }\n" + @@ -125,7 +194,7 @@ public void testSearchingPrepackagedRules() throws IOException { Assert.assertEquals("Searching rules failed", RestStatus.OK, restStatus(searchResponse)); Map responseBody = asMap(searchResponse); - Assert.assertEquals(1579, ((Map) ((Map) responseBody.get("hits")).get("total")).get("value")); + Assert.assertEquals(5, ((Map) ((Map) responseBody.get("hits")).get("total")).get("value")); } @SuppressWarnings("unchecked") @@ -142,7 +211,8 @@ public void testSearchingPrepackagedRulesByMitreAttackID() throws IOException { " }\n" + " }\n" + " }\n" + - " }\n" + + " },\n" + + " \"_source\": [\"rule.query_field_names\"]" + "}"; Response searchResponse = makeRequest(client(), "POST", String.format(Locale.getDefault(), "%s/_search", SecurityAnalyticsPlugin.RULE_BASE_URI), Collections.singletonMap("pre_packaged", "true"), @@ -151,6 +221,12 @@ public void testSearchingPrepackagedRulesByMitreAttackID() throws IOException { Map responseBody = asMap(searchResponse); Assert.assertEquals(9, ((Map) ((Map) responseBody.get("hits")).get("total")).get("value")); + // Verify that _source filtering is working + List> hits = ((List>)((Map) responseBody.get("hits")).get("hits")); + Map sourceOfDoc0 = (Map)hits.get(0).get("_source"); + Map rule = (Map) sourceOfDoc0.get("rule"); + assertEquals(1, rule.size()); + assertTrue(rule.containsKey("query_field_names")); } @SuppressWarnings("unchecked") @@ -205,11 +281,24 @@ public void testSearchingPrepackagedRulesByAuthor() throws IOException { Assert.assertEquals(17, ((Map) ((Map) responseBody.get("hits")).get("total")).get("value")); } + public void testSearchingCustomRulesWhenNoneExist() throws IOException { + String request = "{\n" + + " \"query\": {\n" + + " \"match_all\": {}\n" + + " }\n" + + "}"; + + Response searchResponse = makeRequest(client(), "POST", String.format(Locale.getDefault(), "%s/_search", SecurityAnalyticsPlugin.RULE_BASE_URI), Collections.singletonMap("pre_packaged", "false"), + new StringEntity(request), new BasicHeader("Content-Type", "application/json")); + Assert.assertEquals("Searching rules failed", RestStatus.OK, restStatus(searchResponse)); + Map responseBody = asMap(searchResponse); + Assert.assertEquals(0, ((Map) ((Map) responseBody.get("hits")).get("total")).get("value")); + } @SuppressWarnings("unchecked") public void testSearchingCustomRules() throws IOException { String rule = randomRule(); - Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", "windows"), + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", randomDetectorType()), new StringEntity(rule), new BasicHeader("Content-Type", "application/json")); Assert.assertEquals("Create rule failed", RestStatus.CREATED, restStatus(createResponse)); @@ -220,7 +309,7 @@ public void testSearchingCustomRules() throws IOException { " \"query\": {\n" + " \"bool\": {\n" + " \"must\": [\n" + - " { \"match\": {\"rule.category\": \"windows\"}}\n" + + " { \"match\": {\"rule.category\": \"" + randomDetectorType().toLowerCase(Locale.ROOT) + "\"}}\n" + " ]\n" + " }\n" + " }\n" + @@ -244,7 +333,7 @@ public void testUpdatingUnusedRule() throws IOException { // both req params and req body are supported createMappingRequest.setJsonEntity( "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"windows\", " + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + " \"partial\":true" + "}" ); @@ -254,18 +343,55 @@ public void testUpdatingUnusedRule() throws IOException { String rule = randomRule(); - Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", "windows"), + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", randomDetectorType()), new StringEntity(rule), new BasicHeader("Content-Type", "application/json")); Assert.assertEquals("Create rule failed", RestStatus.CREATED, restStatus(createResponse)); Map responseBody = asMap(createResponse); String createdId = responseBody.get("_id").toString(); - Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.RULE_BASE_URI + "/" + createdId, Map.of("category", "windows"), + Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.RULE_BASE_URI + "/" + createdId, Map.of("category", randomDetectorType()), new StringEntity(randomEditedRule()), new BasicHeader("Content-Type", "application/json")); Assert.assertEquals("Update rule failed", RestStatus.OK, restStatus(updateResponse)); } + public void testUpdatingARule_incorrect_category() throws IOException { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response response = client().performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + String rule = randomRule(); + + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", randomDetectorType()), + new StringEntity(rule), new BasicHeader("Content-Type", "application/json")); + Assert.assertEquals("Create rule failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + String createdId = responseBody.get("_id").toString(); + + try { + makeRequest(client(), "PUT", SecurityAnalyticsPlugin.RULE_BASE_URI + "/" + createdId, Map.of("category", "unknown_category"), + new StringEntity(randomEditedRule()), new BasicHeader("Content-Type", "application/json")); + fail("expected exception due to invalid category"); + } catch (ResponseException e) { + assertEquals(HttpStatus.SC_BAD_REQUEST, e.getResponse().getStatusLine().getStatusCode()); + Assert.assertTrue( + e.getMessage().contains("Invalid rule category") + ); + } + } + public void testUpdatingUnusedRuleAfterDetectorIndexCreated() throws IOException { String index = createTestIndex(randomIndex(), windowsIndexMapping()); @@ -274,7 +400,7 @@ public void testUpdatingUnusedRuleAfterDetectorIndexCreated() throws IOException // both req params and req body are supported createMappingRequest.setJsonEntity( "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"windows\", " + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + " \"partial\":true" + "}" ); @@ -284,7 +410,7 @@ public void testUpdatingUnusedRuleAfterDetectorIndexCreated() throws IOException String rule = randomRule(); - Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", "windows"), + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", randomDetectorType()), new StringEntity(rule), new BasicHeader("Content-Type", "application/json")); Assert.assertEquals("Create rule failed", RestStatus.CREATED, restStatus(createResponse)); @@ -299,7 +425,7 @@ public void testUpdatingUnusedRuleAfterDetectorIndexCreated() throws IOException createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); - Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.RULE_BASE_URI + "/" + createdId, Map.of("category", "windows"), + Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.RULE_BASE_URI + "/" + createdId, Map.of("category", randomDetectorType()), new StringEntity(randomEditedRule()), new BasicHeader("Content-Type", "application/json")); Assert.assertEquals("Update rule failed", RestStatus.OK, restStatus(updateResponse)); } @@ -313,7 +439,7 @@ public void testUpdatingUsedRule() throws IOException { // both req params and req body are supported createMappingRequest.setJsonEntity( "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"windows\", " + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + " \"partial\":true" + "}" ); @@ -323,7 +449,7 @@ public void testUpdatingUsedRule() throws IOException { String rule = randomRule(); - Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", "windows"), + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", randomDetectorType()), new StringEntity(rule), new BasicHeader("Content-Type", "application/json")); Assert.assertEquals("Create rule failed", RestStatus.CREATED, restStatus(createResponse)); @@ -364,14 +490,14 @@ public void testUpdatingUsedRule() throws IOException { Assert.assertEquals(6, noOfSigmaRuleMatches); try { - makeRequest(client(), "PUT", SecurityAnalyticsPlugin.RULE_BASE_URI + "/" + createdId, Collections.singletonMap("category", "windows"), + makeRequest(client(), "PUT", SecurityAnalyticsPlugin.RULE_BASE_URI + "/" + createdId, Collections.singletonMap("category", randomDetectorType()), new StringEntity(randomEditedRule()), new BasicHeader("Content-Type", "application/json")); } catch (ResponseException ex) { Assert.assertTrue(new String(ex.getResponse().getEntity().getContent().readAllBytes()) .contains(String.format(Locale.getDefault(), "Rule with id %s is actively used by detectors. Update can be forced by setting forced flag to true", createdId))); } - Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.RULE_BASE_URI + "/" + createdId, Map.of("category", "windows", "forced", "true"), + Response updateResponse = makeRequest(client(), "PUT", SecurityAnalyticsPlugin.RULE_BASE_URI + "/" + createdId, Map.of("category", randomDetectorType(), "forced", "true"), new StringEntity(randomEditedRule()), new BasicHeader("Content-Type", "application/json")); Assert.assertEquals("Update rule failed", RestStatus.OK, restStatus(updateResponse)); @@ -404,7 +530,7 @@ public void testDeletingUnusedRule() throws IOException { // both req params and req body are supported createMappingRequest.setJsonEntity( "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"windows\", " + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + " \"partial\":true" + "}" ); @@ -414,7 +540,7 @@ public void testDeletingUnusedRule() throws IOException { String rule = randomRule(); - Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", "windows"), + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", randomDetectorType()), new StringEntity(rule), new BasicHeader("Content-Type", "application/json")); Assert.assertEquals("Create rule failed", RestStatus.CREATED, restStatus(createResponse)); @@ -433,7 +559,7 @@ public void testDeletingUnusedRuleAfterDetectorIndexCreated() throws IOException // both req params and req body are supported createMappingRequest.setJsonEntity( "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"windows\", " + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + " \"partial\":true" + "}" ); @@ -443,7 +569,7 @@ public void testDeletingUnusedRuleAfterDetectorIndexCreated() throws IOException String rule = randomRule(); - Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", "windows"), + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", randomDetectorType()), new StringEntity(rule), new BasicHeader("Content-Type", "application/json")); Assert.assertEquals("Create rule failed", RestStatus.CREATED, restStatus(createResponse)); @@ -469,7 +595,7 @@ public void testDeletingUsedRule() throws IOException { // both req params and req body are supported createMappingRequest.setJsonEntity( "{ \"index_name\":\"" + index + "\"," + - " \"rule_topic\":\"windows\", " + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + " \"partial\":true" + "}" ); @@ -479,7 +605,7 @@ public void testDeletingUsedRule() throws IOException { String rule = randomRule(); - Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", "windows"), + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", randomDetectorType()), new StringEntity(rule), new BasicHeader("Content-Type", "application/json")); Assert.assertEquals("Create rule failed", RestStatus.CREATED, restStatus(createResponse)); @@ -511,7 +637,7 @@ public void testDeletingUsedRule() throws IOException { " }\n" + " }\n" + "}"; - List hits = executeSearch(DetectorMonitorConfig.getRuleIndex("windows"), request); + List hits = executeSearch(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request); Assert.assertEquals(2, hits.size()); Response deleteResponse = makeRequest(client(), "DELETE", SecurityAnalyticsPlugin.RULE_BASE_URI + "/" + createdId, Collections.singletonMap("forced", "true"), null); @@ -524,7 +650,7 @@ public void testDeletingUsedRule() throws IOException { " }\n" + " }\n" + "}"; - hits = executeSearch(DetectorMonitorConfig.getRuleIndex("windows"), request); + hits = executeSearch(DetectorMonitorConfig.getRuleIndex(randomDetectorType()), request); Assert.assertEquals(0, hits.size()); index = Rule.CUSTOM_RULES_INDEX; @@ -545,4 +671,123 @@ public void testDeletingUsedRule() throws IOException { hits = executeSearch(index, request); Assert.assertEquals(0, hits.size()); } + + public void testDeletingNonExistingCustomRule() throws IOException { + try { + makeRequest(client(), "DELETE", SecurityAnalyticsPlugin.RULE_BASE_URI + "/" + java.util.UUID.randomUUID(), Collections.emptyMap(), null); + } catch (ResponseException ex) { + Assert.assertEquals(404, ex.getResponse().getStatusLine().getStatusCode()); + } + } + + public void testCustomRuleValidation() throws IOException { + String rule1 = "title: Remote Encrypting File System Abuse\n" + + "id: 5f92fff9-82e2-48eb-8fc1-8b133556a551\n" + + "description: Detects remote RPC calls to possibly abuse remote encryption service via MS-EFSR\n" + + "references:\n" + + " - https://attack.mitre.org/tactics/TA0008/\n" + + " - https://msrc.microsoft.com/update-guide/vulnerability/CVE-2021-36942\n" + + " - https://github.com/jsecurity101/MSRPC-to-ATTACK/blob/main/documents/MS-EFSR.md\n" + + " - https://github.com/zeronetworks/rpcfirewall\n" + + " - https://zeronetworks.com/blog/stopping_lateral_movement_via_the_rpc_firewall/\n" + + "tags:\n" + + " - attack.defense_evasion\n" + + "status: experimental\n" + + "author: Sagie Dulce, Dekel Paz\n" + + "date: 2022/01/01\n" + + "modified: 2022/01/01\n" + + "logsource:\n" + + " product: rpc_firewall\n" + + " category: application\n" + + " definition: 'Requirements: install and apply the RPC Firewall to all processes with \"audit:true action:block uuid:df1941c5-fe89-4e79-bf10-463657acf44d or c681d488-d850-11d0-8c52-00c04fd90f7e'\n" + + "detection:\n" + + " selection:\n" + + " EventID: 22\n" + + " condition: selection\n" + + "falsepositives:\n" + + " - Legitimate usage of remote file encryption\n" + + "level: high"; + + String rule2 = "title: Remote Encrypting File System Abuse\n" + + "id: 5f92fff9-82e2-48eb-8fc1-8b133556a551\n" + + "description: Detects remote RPC calls to possibly abuse remote encryption service via MS-EFSR\n" + + "references:\n" + + " - https://attack.mitre.org/tactics/TA0008/\n" + + " - https://msrc.microsoft.com/update-guide/vulnerability/CVE-2021-36942\n" + + " - https://github.com/jsecurity101/MSRPC-to-ATTACK/blob/main/documents/MS-EFSR.md\n" + + " - https://github.com/zeronetworks/rpcfirewall\n" + + " - https://zeronetworks.com/blog/stopping_lateral_movement_via_the_rpc_firewall/\n" + + "tags:\n" + + " - attack.defense_evasion\n" + + "status: experimental\n" + + "author: Sagie Dulce, Dekel Paz\n" + + "date: 2022/01/01\n" + + "modified: 2022/01/01\n" + + "logsource:\n" + + " product: rpc_firewall\n" + + " category: application\n" + + " definition: 'Requirements: install and apply the RPC Firewall to all processes with \"audit:true action:block uuid:df1941c5-fe89-4e79-bf10-463657acf44d or c681d488-d850-11d0-8c52-00c04fd90f7e'\n" + + "detection:\n" + + " selection:\n" + + " EventID123: 22\n" + + " condition: selection\n" + + "falsepositives:\n" + + " - Legitimate usage of remote file encryption\n" + + "level: high"; + + // Create rule #1 + Response createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", randomDetectorType()), + new StringEntity(rule1), new BasicHeader("Content-Type", "application/json")); + Assert.assertEquals("Create rule failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String rule1createdId = responseBody.get("_id").toString(); + int createdVersion = Integer.parseInt(responseBody.get("_version").toString()); + Assert.assertNotEquals("response is missing Id", Detector.NO_ID, rule1createdId); + Assert.assertTrue("incorrect version", createdVersion > 0); + Assert.assertEquals("Incorrect Location header", String.format(Locale.getDefault(), "%s/%s", SecurityAnalyticsPlugin.RULE_BASE_URI, rule1createdId), createResponse.getHeader("Location")); + // Create rule #2 + createResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI, Collections.singletonMap("category", randomDetectorType()), + new StringEntity(rule2), new BasicHeader("Content-Type", "application/json")); + Assert.assertEquals("Create rule failed", RestStatus.CREATED, restStatus(createResponse)); + + responseBody = asMap(createResponse); + + String rule2createdId = responseBody.get("_id").toString(); + createdVersion = Integer.parseInt(responseBody.get("_version").toString()); + Assert.assertNotEquals("response is missing Id", Detector.NO_ID, rule2createdId); + Assert.assertTrue("incorrect version", createdVersion > 0); + Assert.assertEquals("Incorrect Location header", String.format(Locale.getDefault(), "%s/%s", SecurityAnalyticsPlugin.RULE_BASE_URI, rule2createdId), createResponse.getHeader("Location")); + + // Create logIndex + createTestIndex("log_index_123", windowsIndexMapping()); + String validateRulesRequest = "{" + + "\"index_name\": \"log_index_123\"," + + "\"rules\": [\"" + rule1createdId + "\",\"" + rule2createdId + "\"]" + + "}"; + Response validationResponse = makeRequest(client(), "POST", SecurityAnalyticsPlugin.RULE_BASE_URI + "/validate", Collections.EMPTY_MAP, new StringEntity(validateRulesRequest), new BasicHeader("Content-Type", "application/json")); + responseBody = asMap(validationResponse); + assertTrue(responseBody.containsKey("nonapplicable_fields")); + assertEquals(rule2createdId, ((List)responseBody.get("nonapplicable_fields")).get(0)); + } + + public void testGetAllRuleCategories() throws IOException { + Response response = makeRequest(client(), "GET", SecurityAnalyticsPlugin.RULE_BASE_URI + "/categories", Collections.emptyMap(), null); + List categories = (List) asMap(response).get("rule_categories"); + assertEquals(13, categories.size()); + assertTrue(((Map)categories.get(0)).get("key").equals("ad_ldap")); + assertTrue(((Map)categories.get(1)).get("key").equals("dns")); + assertTrue(((Map)categories.get(2)).get("key").equals("network")); + assertTrue(((Map)categories.get(3)).get("key").equals("apache_access")); + assertTrue(((Map)categories.get(4)).get("key").equals("cloudtrail")); + assertTrue(((Map)categories.get(5)).get("key").equals("s3")); + assertTrue(((Map)categories.get(6)).get("key").equals("windows")); + assertTrue(((Map)categories.get(7)).get("key").equals("gworkspace")); + assertTrue(((Map)categories.get(8)).get("key").equals("github")); + assertTrue(((Map)categories.get(9)).get("key").equals("m365")); + assertTrue(((Map)categories.get(10)).get("key").equals("okta")); + assertTrue(((Map)categories.get(11)).get("key").equals("azure")); + assertTrue(((Map)categories.get(12)).get("key").equals("linux")); + } } \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/resthandler/SecureDetectorRestApiIT.java b/src/test/java/org/opensearch/securityanalytics/resthandler/SecureDetectorRestApiIT.java new file mode 100644 index 000000000..e9ce3b9d9 --- /dev/null +++ b/src/test/java/org/opensearch/securityanalytics/resthandler/SecureDetectorRestApiIT.java @@ -0,0 +1,412 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.securityanalytics.resthandler; + +import org.apache.http.HttpEntity; +import org.apache.http.HttpHost; +import org.apache.http.HttpStatus; +import org.apache.http.entity.ContentType; +import org.apache.http.nio.entity.NStringEntity; +import org.junit.After; +import org.junit.Before; +import org.opensearch.client.Request; +import org.opensearch.client.Response; +import org.opensearch.client.ResponseException; +import org.opensearch.client.RestClient; +import org.opensearch.common.settings.Settings; +import org.opensearch.commons.rest.SecureRestClientBuilder; +import org.opensearch.rest.RestStatus; +import org.opensearch.search.SearchHit; +import org.opensearch.securityanalytics.SecurityAnalyticsPlugin; +import org.opensearch.securityanalytics.SecurityAnalyticsRestTestCase; +import org.junit.Assert; +import org.opensearch.securityanalytics.model.Detector; + +import java.util.Collections; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.io.IOException; + +import static org.opensearch.securityanalytics.TestHelpers.*; +import static org.opensearch.securityanalytics.TestHelpers.randomDoc; + +public class SecureDetectorRestApiIT extends SecurityAnalyticsRestTestCase { + + static String SECURITY_ANALYTICS_FULL_ACCESS_ROLE = "security_analytics_full_access"; + static String SECURITY_ANALYTICS_READ_ACCESS_ROLE = "security_analytics_read_access"; + static String TEST_HR_BACKEND_ROLE = "HR"; + + static String TEST_IT_BACKEND_ROLE = "IT"; + + static Map roleToPermissionsMap = Map.ofEntries( + Map.entry(SECURITY_ANALYTICS_FULL_ACCESS_ROLE, "cluster:admin/opendistro/securityanalytics/detector/*"), + Map.entry(SECURITY_ANALYTICS_READ_ACCESS_ROLE, "cluster:admin/opendistro/securityanalytics/detector/read") + ); + + private RestClient userClient; + private final String user = "userDetector"; + + + @Before + public void create() throws IOException { + String[] backendRoles = { TEST_HR_BACKEND_ROLE }; + createUserWithData(user, user, SECURITY_ANALYTICS_FULL_ACCESS_ROLE, backendRoles ); + if (userClient == null) { + userClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), user, user).setSocketTimeout(60000).build(); + } + } + + @After + public void cleanup() throws IOException { + userClient.close(); + deleteUser(user); + } + + @SuppressWarnings("unchecked") + public void testCreateDetectorWithFullAccess() throws IOException { + try { + String index = createTestIndex(randomIndex(), windowsIndexMapping()); + // Assign a role to the index + createIndexRole(TEST_HR_ROLE, Collections.emptyList(), indexPermissions, List.of(index)); + String[] users = {user}; + // Assign a role to existing user + createUserRolesMapping(TEST_HR_ROLE, users); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response response = userClient.performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + Detector detector = randomDetector(getRandomPrePackagedRules()); + + Response createResponse = makeRequest(userClient, "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + Assert.assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String createdId = responseBody.get("_id").toString(); + int createdVersion = Integer.parseInt(responseBody.get("_version").toString()); + Assert.assertNotEquals("response is missing Id", Detector.NO_ID, createdId); + Assert.assertTrue("incorrect version", createdVersion > 0); + Assert.assertEquals("Incorrect Location header", String.format(Locale.getDefault(), "%s/%s", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, createdId), createResponse.getHeader("Location")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("rule_topic_index")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("findings_index")); + Assert.assertFalse(((Map) responseBody.get("detector")).containsKey("alert_index")); + + String request = "{\n" + + " \"query\" : {\n" + + " \"match\":{\n" + + " \"_id\": \"" + createdId + "\"\n" + + " }\n" + + " }\n" + + "}"; + List hits = executeSearch(Detector.DETECTORS_INDEX, request); + SearchHit hit = hits.get(0); + + String monitorId = ((List) ((Map) hit.getSourceAsMap().get("detector")).get("monitor_id")).get(0); + + indexDoc(index, "1", randomDoc()); + + Response executeResponse = executeAlertingMonitor(monitorId, Collections.emptyMap()); + Map executeResults = entityAsMap(executeResponse); + + int noOfSigmaRuleMatches = ((List>) ((Map) executeResults.get("input_results")).get("results")).get(0).size(); + Assert.assertEquals(5, noOfSigmaRuleMatches); + + // try to do get detector as a user with read access + String userRead = "userRead"; + String[] backendRoles = { TEST_IT_BACKEND_ROLE }; + createUserWithData( userRead, userRead, SECURITY_ANALYTICS_READ_ACCESS_ROLE, backendRoles ); + RestClient userReadOnlyClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), userRead, userRead).setSocketTimeout(60000).build(); + Response getResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + createdId, Collections.emptyMap(), null); + Map getResponseBody = asMap(getResponse); + Assert.assertEquals(createdId, getResponseBody.get("_id")); + + + // Enable backend filtering and try to read detector as a user with no backend roles matching the user who created the detector + enableOrDisableFilterBy("true"); + try { + getResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + createdId, Collections.emptyMap(), null); + } catch (ResponseException e) + { + assertEquals("Get detector failed", RestStatus.FORBIDDEN, restStatus(e.getResponse())); + } + finally { + userReadOnlyClient.close(); + deleteUser(userRead); + } + + // recreate user with matching backend roles and try again + String[] newBackendRoles = { TEST_HR_BACKEND_ROLE }; + createUserWithData( userRead, userRead, SECURITY_ANALYTICS_READ_ACCESS_ROLE, newBackendRoles ); + userReadOnlyClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), userRead, userRead).setSocketTimeout(60000).build(); + getResponse = makeRequest(userReadOnlyClient, "GET", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + createdId, Collections.emptyMap(), null); + getResponseBody = asMap(getResponse); + Assert.assertEquals(createdId, getResponseBody.get("_id")); + + //Search on id should give one result + String queryJson = "{ \"query\": { \"match\": { \"_id\" : \"" + createdId + "\"} } }"; + HttpEntity requestEntity = new NStringEntity(queryJson, ContentType.APPLICATION_JSON); + Response searchResponse = makeRequest(userReadOnlyClient, "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + "_search", Collections.emptyMap(), requestEntity); + Map searchResponseBody = asMap(searchResponse); + Assert.assertNotNull("response is not null", searchResponseBody); + Map searchResponseHits = (Map) searchResponseBody.get("hits"); + Map searchResponseTotal = (Map) searchResponseHits.get("total"); + Assert.assertEquals(1, searchResponseTotal.get("value")); + + userReadOnlyClient.close(); + deleteUser(userRead); + } finally { + tryDeletingRole(TEST_HR_ROLE); + } + } + + public void testCreateDetectorWithNoBackendRoles() throws IOException { + // try to do create detector as a user with no backend roles + String userFull= "userFull"; + String[] backendRoles = {}; + createUserWithData( userFull, userFull, SECURITY_ANALYTICS_FULL_ACCESS_ROLE, backendRoles ); + RestClient userFullClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), userFull, userFull).setSocketTimeout(60000).build(); + + String index = createTestIndex(client(), randomIndex(), windowsIndexMapping(), Settings.EMPTY); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + + Response response = userFullClient.performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + Detector detector = randomDetector(getRandomPrePackagedRules()); + // Enable backend filtering and try to read detector as a user with no backend roles matching the user who created the detector + enableOrDisableFilterBy("true"); + try { + Response createResponse = makeRequest(userFullClient, "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + } catch (ResponseException e) + { + assertEquals("Create detector failed", RestStatus.FORBIDDEN, restStatus(e.getResponse())); + } + finally { + userFullClient.close(); + deleteUser(userFull); + } + } + + public void testCreateDetector_userHasIndexAccess_success() throws IOException { + String[] backendRoles = { TEST_IT_BACKEND_ROLE }; + String userWithAccess = "user1"; + String roleNameWithIndexPatternAccess = "test-role-1"; + String windowsIndexPattern = "windows*"; + createUserWithDataAndCustomRole(userWithAccess, userWithAccess, roleNameWithIndexPatternAccess, backendRoles, clusterPermissions, indexPermissions, List.of(windowsIndexPattern)); + RestClient clientWithAccess = null; + + try { + clientWithAccess = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), userWithAccess, userWithAccess).setSocketTimeout(60000).build(); + String index = createTestIndex(client(), randomIndex(), windowsIndexMapping(), Settings.EMPTY); + + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + Response response = clientWithAccess.performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + Detector detector = randomDetector(getRandomPrePackagedRules()); + + Response createResponse = makeRequest(clientWithAccess, "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String createdId = responseBody.get("_id").toString(); + int createdVersion = Integer.parseInt(responseBody.get("_version").toString()); + + assertNotEquals("response is missing Id", Detector.NO_ID, createdId); + assertTrue("incorrect version", createdVersion > 0); + assertEquals("Incorrect Location header", String.format(Locale.getDefault(), "%s/%s", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, createdId), createResponse.getHeader("Location")); + assertFalse(((Map) responseBody.get("detector")).containsKey("rule_topic_index")); + assertFalse(((Map) responseBody.get("detector")).containsKey("findings_index")); + assertFalse(((Map) responseBody.get("detector")).containsKey("alert_index")); + } finally { + if (clientWithAccess != null) clientWithAccess.close(); + deleteUser(userWithAccess); + tryDeletingRole(roleNameWithIndexPatternAccess); + } + } + + public void testCreateDetector_userDoesntHaveIndexAccess_failure() throws IOException { + String[] backendRoles = { TEST_IT_BACKEND_ROLE }; + + String userWithoutAccess = "user"; + String roleNameWithoutIndexPatternAccess = "test-role"; + String testIndexPattern = "test*"; + createUserWithDataAndCustomRole(userWithoutAccess, userWithoutAccess, roleNameWithoutIndexPatternAccess, backendRoles, clusterPermissions, indexPermissions, List.of(testIndexPattern)); + RestClient clientWithoutAccess = null; + + try { + clientWithoutAccess = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), userWithoutAccess, userWithoutAccess).setSocketTimeout(60000).build(); + + String index = createTestIndex(client(), randomIndex(), windowsIndexMapping(), Settings.EMPTY); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + Response response = userClient.performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + Detector detector = randomDetector(getRandomPrePackagedRules()); + + try { + makeRequest(clientWithoutAccess, "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + } catch (ResponseException e) { + assertEquals("Create detector error status", RestStatus.FORBIDDEN, restStatus(e.getResponse())); + } + } finally { + if (clientWithoutAccess!= null) clientWithoutAccess.close(); + deleteUser(userWithoutAccess); + tryDeletingRole(roleNameWithoutIndexPatternAccess); + } + } + + public void testUpdateDetector_userHasIndexAccess_success() throws IOException { + String[] backendRoles = { TEST_IT_BACKEND_ROLE }; + + String userWithAccess = "user1"; + String roleNameWithIndexPatternAccess = "test-role-1"; + String windowsIndexPattern = "windows*"; + createUserWithDataAndCustomRole(userWithAccess, userWithAccess, roleNameWithIndexPatternAccess, backendRoles, clusterPermissions, indexPermissions, List.of(windowsIndexPattern)); + RestClient clientWithAccess = null; + try { + clientWithAccess = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), userWithAccess, userWithAccess).setSocketTimeout(60000).build(); + //createUserRolesMapping("alerting_full_access", users); + String index = createTestIndex(client(), randomIndex(), windowsIndexMapping(), Settings.EMPTY); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + Response response = clientWithAccess.performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + Detector detector = randomDetector(getRandomPrePackagedRules()); + + Response createResponse = makeRequest(clientWithAccess, "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String createdId = responseBody.get("_id").toString(); + int createdVersion = Integer.parseInt(responseBody.get("_version").toString()); + + assertNotEquals("response is missing Id", Detector.NO_ID, createdId); + assertTrue("incorrect version", createdVersion > 0); + assertEquals("Incorrect Location header", String.format(Locale.getDefault(), "%s/%s", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, createdId), createResponse.getHeader("Location")); + assertFalse(((Map) responseBody.get("detector")).containsKey("rule_topic_index")); + assertFalse(((Map) responseBody.get("detector")).containsKey("findings_index")); + assertFalse(((Map) responseBody.get("detector")).containsKey("alert_index")); + + String detectorId = responseBody.get("_id").toString(); + Response updateResponse = makeRequest(clientWithAccess, "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(detector)); + assertEquals("Update detector failed", RestStatus.OK, restStatus(updateResponse)); + } finally { + if (clientWithAccess != null) clientWithAccess.close(); + deleteUser(userWithAccess); + tryDeletingRole(roleNameWithIndexPatternAccess); + } + } + + public void testUpdateDetector_userDoesntHaveIndexAccess_failure() throws IOException { + String[] backendRoles = { TEST_IT_BACKEND_ROLE }; + + String userWithoutAccess = "user"; + String roleNameWithoutIndexPatternAccess = "test-role"; + String testIndexPattern = "test*"; + createUserWithDataAndCustomRole(userWithoutAccess, userWithoutAccess, roleNameWithoutIndexPatternAccess, backendRoles, clusterPermissions, indexPermissions, List.of(testIndexPattern)); + RestClient clientWithoutAccess = null; + + try { + clientWithoutAccess = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[]{}), isHttps(), userWithoutAccess, userWithoutAccess).setSocketTimeout(60000).build(); + + //createUserRolesMapping("alerting_full_access", users); + String index = createTestIndex(client(), randomIndex(), windowsIndexMapping(), Settings.EMPTY); + // Assign a role to the index + createIndexRole(TEST_HR_ROLE, Collections.emptyList(), indexPermissions, List.of(index)); + String[] users = {user}; + // Assign a role to existing user + createUserRolesMapping(TEST_HR_ROLE, users); + + // Execute CreateMappingsAction to add alias mapping for index + Request createMappingRequest = new Request("POST", SecurityAnalyticsPlugin.MAPPER_BASE_URI); + // both req params and req body are supported + createMappingRequest.setJsonEntity( + "{ \"index_name\":\"" + index + "\"," + + " \"rule_topic\":\"" + randomDetectorType() + "\", " + + " \"partial\":true" + + "}" + ); + Response response = userClient.performRequest(createMappingRequest); + assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode()); + + Detector detector = randomDetector(getRandomPrePackagedRules()); + + Response createResponse = makeRequest(userClient, "POST", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, Collections.emptyMap(), toHttpEntity(detector)); + assertEquals("Create detector failed", RestStatus.CREATED, restStatus(createResponse)); + + Map responseBody = asMap(createResponse); + + String createdId = responseBody.get("_id").toString(); + int createdVersion = Integer.parseInt(responseBody.get("_version").toString()); + + assertNotEquals("response is missing Id", Detector.NO_ID, createdId); + assertTrue("incorrect version", createdVersion > 0); + assertEquals("Incorrect Location header", String.format(Locale.getDefault(), "%s/%s", SecurityAnalyticsPlugin.DETECTOR_BASE_URI, createdId), createResponse.getHeader("Location")); + assertFalse(((Map) responseBody.get("detector")).containsKey("rule_topic_index")); + assertFalse(((Map) responseBody.get("detector")).containsKey("findings_index")); + assertFalse(((Map) responseBody.get("detector")).containsKey("alert_index")); + + String detectorId = responseBody.get("_id").toString(); + + try { + makeRequest(clientWithoutAccess, "PUT", SecurityAnalyticsPlugin.DETECTOR_BASE_URI + "/" + detectorId, Collections.emptyMap(), toHttpEntity(detector)); + } catch (ResponseException e) { + assertEquals("Update detector error status", RestStatus.FORBIDDEN, restStatus(e.getResponse())); + } + } finally { + if (clientWithoutAccess != null) clientWithoutAccess.close(); + deleteUser(userWithoutAccess); + tryDeletingRole(roleNameWithoutIndexPatternAccess); + tryDeletingRole(TEST_HR_ROLE); + } + } +} \ No newline at end of file diff --git a/src/test/java/org/opensearch/securityanalytics/rules/aggregation/AggregationBackendTests.java b/src/test/java/org/opensearch/securityanalytics/rules/aggregation/AggregationBackendTests.java index b25b276d8..4d394ed36 100644 --- a/src/test/java/org/opensearch/securityanalytics/rules/aggregation/AggregationBackendTests.java +++ b/src/test/java/org/opensearch/securityanalytics/rules/aggregation/AggregationBackendTests.java @@ -42,7 +42,7 @@ public void testCountAggregation() throws SigmaError, IOException { String aggQuery = aggQueries.getAggQuery(); String bucketTriggerQuery = aggQueries.getBucketTriggerQuery(); - Assert.assertEquals("\"aggs\":{\"result_agg\":{\"terms\":{\"field\":\"_index\"}}}", aggQuery); + Assert.assertEquals("{\"result_agg\":{\"terms\":{\"field\":\"_index\"}}}", aggQuery); Assert.assertEquals("{\"buckets_path\":{\"_cnt\":\"_cnt\"},\"parent_bucket_path\":\"result_agg\",\"script\":{\"source\":\"params._cnt > 1.0\",\"lang\":\"painless\"}}", bucketTriggerQuery); } @@ -73,7 +73,7 @@ public void testCountAggregationWithGroupBy() throws IOException, SigmaError { String aggQuery = aggQueries.getAggQuery(); String bucketTriggerQuery = aggQueries.getBucketTriggerQuery(); - Assert.assertEquals("\"aggs\":{\"result_agg\":{\"terms\":{\"field\":\"fieldB\"}}}", aggQuery); + Assert.assertEquals("{\"result_agg\":{\"terms\":{\"field\":\"fieldB\"}}}", aggQuery); Assert.assertEquals("{\"buckets_path\":{\"_cnt\":\"_cnt\"},\"parent_bucket_path\":\"result_agg\",\"script\":{\"source\":\"params._cnt > 1.0\",\"lang\":\"painless\"}}", bucketTriggerQuery); } @@ -104,7 +104,10 @@ public void testSumAggregationWithGroupBy() throws IOException, SigmaError { String aggQuery = aggQueries.getAggQuery(); String bucketTriggerQuery = aggQueries.getBucketTriggerQuery(); - Assert.assertEquals("\"aggs\":{\"result_agg\":{\"terms\":{\"field\":\"fieldB\"},\"aggs\":{\"fieldA\":{\"sum\":{\"field\":\"fieldA\"}}}}}", aggQuery); + + // inputs.query.aggregations -> Query + Assert.assertEquals("{\"result_agg\":{\"terms\":{\"field\":\"fieldB\"},\"aggs\":{\"fieldA\":{\"sum\":{\"field\":\"fieldA\"}}}}}", aggQuery); + // triggers.bucket_level_trigger.condition -> Condition Assert.assertEquals("{\"buckets_path\":{\"fieldA\":\"fieldA\"},\"parent_bucket_path\":\"result_agg\",\"script\":{\"source\":\"params.fieldA > 110.0\",\"lang\":\"painless\"}}", bucketTriggerQuery); } @@ -135,7 +138,7 @@ public void testMinAggregationWithGroupBy() throws IOException, SigmaError { String aggQuery = aggQueries.getAggQuery(); String bucketTriggerQuery = aggQueries.getBucketTriggerQuery(); - Assert.assertEquals("\"aggs\":{\"result_agg\":{\"terms\":{\"field\":\"fieldB\"},\"aggs\":{\"fieldA\":{\"min\":{\"field\":\"fieldA\"}}}}}", aggQuery); + Assert.assertEquals("{\"result_agg\":{\"terms\":{\"field\":\"fieldB\"},\"aggs\":{\"fieldA\":{\"min\":{\"field\":\"fieldA\"}}}}}", aggQuery); Assert.assertEquals("{\"buckets_path\":{\"fieldA\":\"fieldA\"},\"parent_bucket_path\":\"result_agg\",\"script\":{\"source\":\"params.fieldA > 110.0\",\"lang\":\"painless\"}}", bucketTriggerQuery); } @@ -166,7 +169,7 @@ public void testMaxAggregationWithGroupBy() throws IOException, SigmaError { String aggQuery = aggQueries.getAggQuery(); String bucketTriggerQuery = aggQueries.getBucketTriggerQuery(); - Assert.assertEquals("\"aggs\":{\"result_agg\":{\"terms\":{\"field\":\"fieldB\"},\"aggs\":{\"fieldA\":{\"max\":{\"field\":\"fieldA\"}}}}}", aggQuery); + Assert.assertEquals("{\"result_agg\":{\"terms\":{\"field\":\"fieldB\"},\"aggs\":{\"fieldA\":{\"max\":{\"field\":\"fieldA\"}}}}}", aggQuery); Assert.assertEquals("{\"buckets_path\":{\"fieldA\":\"fieldA\"},\"parent_bucket_path\":\"result_agg\",\"script\":{\"source\":\"params.fieldA > 110.0\",\"lang\":\"painless\"}}", bucketTriggerQuery); } @@ -197,7 +200,7 @@ public void testAvgAggregationWithGroupBy() throws IOException, SigmaError { String aggQuery = aggQueries.getAggQuery(); String bucketTriggerQuery = aggQueries.getBucketTriggerQuery(); - Assert.assertEquals("\"aggs\":{\"result_agg\":{\"terms\":{\"field\":\"fieldB\"},\"aggs\":{\"fieldA\":{\"avg\":{\"field\":\"fieldA\"}}}}}", aggQuery); + Assert.assertEquals("{\"result_agg\":{\"terms\":{\"field\":\"fieldB\"},\"aggs\":{\"fieldA\":{\"avg\":{\"field\":\"fieldA\"}}}}}", aggQuery); Assert.assertEquals("{\"buckets_path\":{\"fieldA\":\"fieldA\"},\"parent_bucket_path\":\"result_agg\",\"script\":{\"source\":\"params.fieldA > 110.0\",\"lang\":\"painless\"}}", bucketTriggerQuery); } } diff --git a/src/test/java/org/opensearch/securityanalytics/rules/backend/QueryBackendTests.java b/src/test/java/org/opensearch/securityanalytics/rules/backend/QueryBackendTests.java index d22cb2896..10501cddd 100644 --- a/src/test/java/org/opensearch/securityanalytics/rules/backend/QueryBackendTests.java +++ b/src/test/java/org/opensearch/securityanalytics/rules/backend/QueryBackendTests.java @@ -269,20 +269,20 @@ public void testConvertValueNull() throws IOException, SigmaError { OSQueryBackend queryBackend = testBackend(); List queries = queryBackend.convertRule(SigmaRule.fromYaml( " title: Test\n" + - " id: 39f919f3-980b-4e6f-a975-8af7e507ef2b\n" + - " status: test\n" + - " level: critical\n" + - " description: Detects QuarksPwDump clearing access history in hive\n" + - " author: Florian Roth\n" + - " date: 2017/05/15\n" + - " logsource:\n" + - " category: test_category\n" + - " product: test_product\n" + - " detection:\n" + - " sel:\n" + - " fieldA1: null\n" + - " condition: sel", false)); - Assert.assertEquals("mappedA: null", queries.get(0).toString()); + " id: 39f919f3-980b-4e6f-a975-8af7e507ef2b\n" + + " status: test\n" + + " level: critical\n" + + " description: Detects QuarksPwDump clearing access history in hive\n" + + " author: Florian Roth\n" + + " date: 2017/05/15\n" + + " logsource:\n" + + " category: test_category\n" + + " product: test_product\n" + + " detection:\n" + + " sel:\n" + + " fieldA1: null\n" + + " condition: sel", false)); + Assert.assertEquals("mappedA: (NOT [* TO *])", queries.get(0).toString()); } public void testConvertValueRegex() throws IOException, SigmaError { @@ -322,7 +322,7 @@ public void testConvertValueRegexUnbound() throws IOException, SigmaError { " sel:\n" + " \"|re\": pat.*tern\"foo\"bar\n" + " condition: sel", false)); - Assert.assertEquals("_0: /pat.*tern\\\"foo\\\"bar/", queries.get(0).toString()); + Assert.assertEquals("/pat.*tern\\\"foo\\\"bar/", queries.get(0).toString()); } public void testConvertValueCidrWildcardNone() throws IOException, SigmaError { @@ -478,7 +478,7 @@ public void testConvertOrInMixedKeywordField() throws IOException, SigmaError { " fieldB: value2\n" + " sel3: value3\n" + " condition: sel1 or sel2 or sel3", false)); - Assert.assertEquals("((fieldA: \"value1\") OR (mappedB: \"value2\")) OR (_0: \"value3\")", queries.get(0).toString()); + Assert.assertEquals("((fieldA: \"value1\") OR (mappedB: \"value2\")) OR (\"value3\")", queries.get(0).toString()); } public void testConvertOrInMixedFields() throws IOException, SigmaError { @@ -509,23 +509,23 @@ public void testConvertOrInUnallowedValueType() throws IOException, SigmaError { OSQueryBackend queryBackend = testBackend(); List queries = queryBackend.convertRule(SigmaRule.fromYaml( " title: Test\n" + - " id: 39f919f3-980b-4e6f-a975-8af7e507ef2b\n" + - " status: test\n" + - " level: critical\n" + - " description: Detects QuarksPwDump clearing access history in hive\n" + - " author: Florian Roth\n" + - " date: 2017/05/15\n" + - " logsource:\n" + - " category: test_category\n" + - " product: test_product\n" + - " detection:\n" + - " sel:\n" + - " fieldA1: \n" + - " - value1\n" + - " - value2\n" + - " - null\n" + - " condition: sel", false)); - Assert.assertEquals("(mappedA: \"value1\") OR (mappedA: \"value2\") OR (mappedA: null)", queries.get(0).toString()); + " id: 39f919f3-980b-4e6f-a975-8af7e507ef2b\n" + + " status: test\n" + + " level: critical\n" + + " description: Detects QuarksPwDump clearing access history in hive\n" + + " author: Florian Roth\n" + + " date: 2017/05/15\n" + + " logsource:\n" + + " category: test_category\n" + + " product: test_product\n" + + " detection:\n" + + " sel:\n" + + " fieldA1: \n" + + " - value1\n" + + " - value2\n" + + " - null\n" + + " condition: sel", false)); + Assert.assertEquals("(mappedA: \"value1\") OR (mappedA: \"value2\") OR (mappedA: (NOT [* TO *]))", queries.get(0).toString()); } public void testConvertOrInListNumbers() throws IOException, SigmaError { @@ -591,9 +591,9 @@ public void testConvertUnboundValues() throws IOException, SigmaError { " sel:\n" + " - value1\n" + " - value2\n" + - " - 4\n" + + " - 123\n" + " condition: sel", false)); - Assert.assertEquals("(_0: \"value1\") OR (_1: \"value2\") OR (_2: 4)", queries.get(0).toString()); + Assert.assertEquals("(\"value1\") OR (\"value2\") OR (\"123\")", queries.get(0).toString()); } public void testConvertInvalidUnboundBool() throws IOException { @@ -876,6 +876,103 @@ public void testConvertProxyRule() throws IOException, SigmaError { Assert.assertEquals(true, true); } + public void testConvertUnboundValuesAsWildcard() throws IOException, SigmaError { + OSQueryBackend queryBackend = testBackend(); + List queries = queryBackend.convertRule(SigmaRule.fromYaml( + " title: Test\n" + + " id: 39f919f3-980b-4e6f-a975-8af7e507ef2b\n" + + " status: test\n" + + " level: critical\n" + + " description: Detects QuarksPwDump clearing access history in hive\n" + + " author: Florian Roth\n" + + " date: 2017/05/15\n" + + " logsource:\n" + + " category: test_category\n" + + " product: test_product\n" + + " detection:\n" + + " sel:\n" + + " fieldA1: \n" + + " - value1\n" + + " - value2\n" + + " - value3\n" + + " keywords:\n" + + " - test*\n" + + " condition: sel or keywords", false)); + Assert.assertEquals("((mappedA: \"value1\") OR (mappedA: \"value2\") OR (mappedA: \"value3\")) OR (test*)", queries.get(0).toString()); + } + + public void testConvertSkipEmptyStringStartsWithModifier() throws IOException, SigmaError { + OSQueryBackend queryBackend = testBackend(); + Assert.assertThrows(SigmaValueError.class, () -> { + queryBackend.convertRule(SigmaRule.fromYaml( + " title: Test\n" + + " id: 39f919f3-980b-4e6f-a975-8af7e507ef2b\n" + + " status: test\n" + + " level: critical\n" + + " description: Detects QuarksPwDump clearing access history in hive\n" + + " author: Florian Roth\n" + + " date: 2017/05/15\n" + + " logsource:\n" + + " category: test_category\n" + + " product: test_product\n" + + " detection:\n" + + " sel:\n" + + " fieldA1|startswith: \n" + + " - value1\n" + + " - value2\n" + + " - ''\n" + + " condition: sel", false)); + }); + } + + public void testConvertSkipEmptyStringEndsWithModifier() throws IOException, SigmaError { + OSQueryBackend queryBackend = testBackend(); + Assert.assertThrows(SigmaValueError.class, () -> { + queryBackend.convertRule(SigmaRule.fromYaml( + " title: Test\n" + + " id: 39f919f3-980b-4e6f-a975-8af7e507ef2b\n" + + " status: test\n" + + " level: critical\n" + + " description: Detects QuarksPwDump clearing access history in hive\n" + + " author: Florian Roth\n" + + " date: 2017/05/15\n" + + " logsource:\n" + + " category: test_category\n" + + " product: test_product\n" + + " detection:\n" + + " sel:\n" + + " fieldA1|endswith: \n" + + " - value1\n" + + " - value2\n" + + " - ''\n" + + " condition: sel", false)); + }); + } + + public void testConvertSkipEmptyStringContainsModifier() throws IOException, SigmaError { + OSQueryBackend queryBackend = testBackend(); + Assert.assertThrows(SigmaValueError.class, () -> { + queryBackend.convertRule(SigmaRule.fromYaml( + " title: Test\n" + + " id: 39f919f3-980b-4e6f-a975-8af7e507ef2b\n" + + " status: test\n" + + " level: critical\n" + + " description: Detects QuarksPwDump clearing access history in hive\n" + + " author: Florian Roth\n" + + " date: 2017/05/15\n" + + " logsource:\n" + + " category: test_category\n" + + " product: test_product\n" + + " detection:\n" + + " sel:\n" + + " fieldA1|contains: \n" + + " - value1\n" + + " - value2\n" + + " - ''\n" + + " condition: sel", false)); + }); + } + private OSQueryBackend testBackend() throws IOException { return new OSQueryBackend("others_proxy", true, true); } diff --git a/src/test/java/org/opensearch/securityanalytics/rules/objects/SigmaDetectionsTests.java b/src/test/java/org/opensearch/securityanalytics/rules/objects/SigmaDetectionsTests.java index 461fd9021..dba65b1d0 100644 --- a/src/test/java/org/opensearch/securityanalytics/rules/objects/SigmaDetectionsTests.java +++ b/src/test/java/org/opensearch/securityanalytics/rules/objects/SigmaDetectionsTests.java @@ -20,7 +20,9 @@ import org.opensearch.securityanalytics.rules.utils.AnyOneOf; import org.opensearch.securityanalytics.rules.utils.Either; import org.opensearch.test.OpenSearchTestCase; +import org.yaml.snakeyaml.LoaderOptions; import org.yaml.snakeyaml.Yaml; +import org.yaml.snakeyaml.constructor.SafeConstructor; import java.util.Collections; import java.util.List; @@ -29,7 +31,7 @@ public class SigmaDetectionsTests extends OpenSearchTestCase { public void testSigmaDetectionsFromDict() throws SigmaError{ - Yaml yaml = new Yaml(); + Yaml yaml = new Yaml(new SafeConstructor(new LoaderOptions())); Map detectionsMap = yaml.load( " selection:\n" + " EventID: 16\n" + @@ -61,7 +63,7 @@ public void testSigmaDetectionsFromDict() throws SigmaError{ } public void testSigmaDetectionsFromDictNoDetections() { - Yaml yaml = new Yaml(); + Yaml yaml = new Yaml(new SafeConstructor(new LoaderOptions())); Map detectionsMap = yaml.load( " condition: selection"); Exception exception = assertThrows(SigmaDetectionError.class, () -> { @@ -75,7 +77,7 @@ public void testSigmaDetectionsFromDictNoDetections() { } public void testSigmaDetectionsFromDictNoCondition() { - Yaml yaml = new Yaml(); + Yaml yaml = new Yaml(new SafeConstructor(new LoaderOptions())); Map detectionsMap = yaml.load( " selection:\n" + " EventID: 16\n" + @@ -93,7 +95,7 @@ public void testSigmaDetectionsFromDictNoCondition() { } public void testDetectionItemAllModifiedKeyPlainValuesPostProcess() throws SigmaError{ - Yaml yaml = new Yaml(); + Yaml yaml = new Yaml(new SafeConstructor(new LoaderOptions())); Map detectionsMap = yaml.load( " selection:\n" + " field|all: [\"val1\", \"val2\", 123]\n" + @@ -111,7 +113,7 @@ public void testDetectionItemAllModifiedKeyPlainValuesPostProcess() throws Sigma } public void testDetectionItemAllModifiedUnboundPlainValuesPostProcess() throws SigmaError { - Yaml yaml = new Yaml(); + Yaml yaml = new Yaml(new SafeConstructor(new LoaderOptions())); Map detectionsMap = yaml.load( " selection:\n" + " \"|all\": [\"val1\", \"val2\", 123]\n" + @@ -129,7 +131,7 @@ public void testDetectionItemAllModifiedUnboundPlainValuesPostProcess() throws S } public void testDetectionItemAllModifiedKeySpecialValuesPostProcess() throws SigmaError { - Yaml yaml = new Yaml(); + Yaml yaml = new Yaml(new SafeConstructor(new LoaderOptions())); Map detectionsMap = yaml.load( " selection:\n" + " field|all: [\"val1*\", \"val2\", 123]\n" + diff --git a/src/test/resources/ad_ldap-sample.json b/src/test/resources/ad_ldap-sample.json new file mode 100644 index 000000000..3101b1af4 --- /dev/null +++ b/src/test/resources/ad_ldap-sample.json @@ -0,0 +1,21 @@ +{ + "azure-signinlogs-properties-user_id": "1234", + "azure-activitylogs-category": "1234", + "azure-platformlogs-operation_name": "1234", + "modified_properties-new_value": "1234", + "azure-resource-provider": "1234", + "azure-signinlogs-properties-conditional_access_status": "1234", + "SearchFilter": "1234", + "azure-platformlogs-result_type": "1234", + "azure-signinlogs-properties-device_detail-is_compliant": "1234", + "ResourceDisplayName": "1234", + "azure-signinlogs-properties-authentication_requirement": "1234", + "TargetResources": "1234", + "Workload": "1234", + "azure-signinlogs-properties-device_detail-device_id": "1234", + "azure-signinlogs-properties-resource_id": "1234", + "EventID": 12345, + "azure-signinlogs-properties-network_location_details": "1234", + "azure-auditlogs-properties-activity_display_name": "1234", + "@timestamp": "2022-12-27T20:29:31.734Z" +} \ No newline at end of file diff --git a/src/test/resources/azure-sample.json b/src/test/resources/azure-sample.json new file mode 100644 index 000000000..d42291fbc --- /dev/null +++ b/src/test/resources/azure-sample.json @@ -0,0 +1,27 @@ +{ + "azure.signinlogs.props.user_id": "111", + "azure.activitylogs.category": "111", + "modified_properties.new_value": "111", + "azure.resource.provider": "111", + "azure.signinlogs.props.conditional_access_status": "111", + "SearchFilter": "111", + "azure.platformlogs.result_type": "111", + "azure.signinlogs.props.device_detail.is_compliant": true, + "ResourceDisplayName": 111, + "azure.signinlogs.props.authentication_requirement": "111", + "TargetResources": "111", + "Workload": "111", + "azure.signinlogs.props.device_detail.device_id": "111", + "azure.platformlogs.operation_name": "111", + "azure.signinlogs.props.resource_id": "111", + "EventID": 1234, + "azure.signinlogs.props.network_location_details": "111", + "azure.auditlogs.props.activity_display_name": "111", + "azure.signinlogs.result-description": "111", + "eventSource": "111", + "eventName": "111", + "azure.platformlogs.status": "111", + "azure.auditlogs.props.logged_by_service": "111", + "properties_message": "111", + "@timestamp": "2022-12-27T20:29:31.734Z" +} \ No newline at end of file diff --git a/src/test/resources/cloudtrail-sample.json b/src/test/resources/cloudtrail-sample.json new file mode 100644 index 000000000..1732bff36 --- /dev/null +++ b/src/test/resources/cloudtrail-sample.json @@ -0,0 +1,31 @@ +{ + "cloud.account.id": "213123", + "cloud.region": "213123", + "source.geo.country_iso_code": "213123", + "source.geo.country_name": "213123", + "source.as.organization.name": "213123", + "source.ip": "213123", + "userIdentity.arn": "213123", + "eventName": "213123", + "eventType": "213123", + "errorCode": "213123", + "eventSource": "213123", + "tlsDetails.tlsVersion": "213123", + "user_agent.name": "213123", + "threat.matched.providers": "213123", + "aws-cloudtrail-event_name": "123", + "aws-cloudtrail-event_source": "123", + "aws-cloudtrail-event_type": "123", + "aws-cloudtrail-error_message": "123", + "aws-cloudtrail-error_code": "123", + "aws-cloudtrail-response_elements-text": "123", + "aws-cloudtrail-response_elements-pending_modified_values-master_user_password": "123", + "aws-cloudtrail-response_elements-publicly_accessible": "123", + "aws-cloudtrail-request_parameters-arn": "123", + "aws-cloudtrail-request_parameters-attribute": "123", + "aws-cloudtrail-request_parameters-username": "123", + "aws-cloudtrail-request_parameters-container_definitions-command": "123", + "aws-cloudtrail-user_identity-session_context-session_issuer-type": "123", + "aws-cloudtrail-user_identity-arn": "123", + "@timestamp": "2022-12-27T20:29:31.734Z" +} diff --git a/src/test/resources/dns-sample.json b/src/test/resources/dns-sample.json new file mode 100644 index 000000000..1c4df84ce --- /dev/null +++ b/src/test/resources/dns-sample.json @@ -0,0 +1,50 @@ +{ + "dns": { + "additionals_count": 0, + "answers": [ + { + "class": "IN", + "data": "192.168.73.66", + "name": "chat.testdomain.loc", + "ttl": "52", + "type": "A" + } + ], + "answers_count": 1, + "authorities_count": 0, + "flags": { + "authentic_data": false, + "authoritative": false, + "checking_disabled": false, + "recursion_available": true, + "recursion_desired": true, + "truncated_response": false + }, + "header_flags": [ + "RD", + "RA" + ], + "id": 59295, + "op_code": "QUERY", + "opt": { + "do": false, + "ext_rcode": "NOERROR", + "udp_size": 512, + "version": "0" + }, + "question": { + "class": "IN", + "etld_plus_one": "testdomain.loc", + "name": "chat.testdomain.loc", + "registered_domain": "testdomain.loc", + "subdomain": "chat", + "top_level_domain": "loc", + "type": "A" + }, + "resolved_ip": [ + "192.168.73.66" + ], + "response_code": "NOERROR", + "type": "answer" + } +} \ No newline at end of file diff --git a/src/test/resources/s3-sample.json b/src/test/resources/s3-sample.json new file mode 100644 index 000000000..58253cd99 --- /dev/null +++ b/src/test/resources/s3-sample.json @@ -0,0 +1,14 @@ +{ + "cloud.region": "123", + "source.geo.country_iso_code": "123", + "source.ip": "123", + "Bucket": "123", + "ErrorCode": "123", + "HTTPstatus": "123", + "Operation": "123", + "RequestURI_key": "123", + "aws.s3access.requester": "1234", + "aws-cloudtrail-event_source": "123", + "aws-cloudtrail-event_name": "123", + "@timestamp": "123" +} \ No newline at end of file diff --git a/src/test/resources/sample.pem b/src/test/resources/sample.pem new file mode 100644 index 000000000..7ba92534e --- /dev/null +++ b/src/test/resources/sample.pem @@ -0,0 +1,28 @@ +-----BEGIN CERTIFICATE----- +MIIEyTCCA7GgAwIBAgIGAWLrc1O2MA0GCSqGSIb3DQEBCwUAMIGPMRMwEQYKCZIm +iZPyLGQBGRYDY29tMRcwFQYKCZImiZPyLGQBGRYHZXhhbXBsZTEZMBcGA1UECgwQ +RXhhbXBsZSBDb20gSW5jLjEhMB8GA1UECwwYRXhhbXBsZSBDb20gSW5jLiBSb290 +IENBMSEwHwYDVQQDDBhFeGFtcGxlIENvbSBJbmMuIFJvb3QgQ0EwHhcNMTgwNDIy +MDM0MzQ3WhcNMjgwNDE5MDM0MzQ3WjBeMRIwEAYKCZImiZPyLGQBGRYCZGUxDTAL +BgNVBAcMBHRlc3QxDTALBgNVBAoMBG5vZGUxDTALBgNVBAsMBG5vZGUxGzAZBgNV +BAMMEm5vZGUtMC5leGFtcGxlLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC +AQoCggEBAJa+f476vLB+AwK53biYByUwN+40D8jMIovGXm6wgT8+9Sbs899dDXgt +9CE1Beo65oP1+JUz4c7UHMrCY3ePiDt4cidHVzEQ2g0YoVrQWv0RedS/yx/DKhs8 +Pw1O715oftP53p/2ijD5DifFv1eKfkhFH+lwny/vMSNxellpl6NxJTiJVnQ9HYOL +gf2t971ITJHnAuuxUF48HcuNovW4rhtkXef8kaAN7cE3LU+A9T474ULNCKkEFPIl +ZAKN3iJNFdVsxrTU+CUBHzk73Do1cCkEvJZ0ZFjp0Z3y8wLY/gqWGfGVyA9l2CUq +eIZNf55PNPtGzOrvvONiui48vBKH1LsCAwEAAaOCAVkwggFVMIG8BgNVHSMEgbQw +gbGAFJI1DOAPHitF9k0583tfouYSl0BzoYGVpIGSMIGPMRMwEQYKCZImiZPyLGQB +GRYDY29tMRcwFQYKCZImiZPyLGQBGRYHZXhhbXBsZTEZMBcGA1UECgwQRXhhbXBs +ZSBDb20gSW5jLjEhMB8GA1UECwwYRXhhbXBsZSBDb20gSW5jLiBSb290IENBMSEw +HwYDVQQDDBhFeGFtcGxlIENvbSBJbmMuIFJvb3QgQ0GCAQEwHQYDVR0OBBYEFKyv +78ZmFjVKM9g7pMConYH7FVBHMAwGA1UdEwEB/wQCMAAwDgYDVR0PAQH/BAQDAgXg +MCAGA1UdJQEB/wQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjA1BgNVHREELjAsiAUq +AwQFBYISbm9kZS0wLmV4YW1wbGUuY29tgglsb2NhbGhvc3SHBH8AAAEwDQYJKoZI +hvcNAQELBQADggEBAIOKuyXsFfGv1hI/Lkpd/73QNqjqJdxQclX57GOMWNbOM5H0 +5/9AOIZ5JQsWULNKN77aHjLRr4owq2jGbpc/Z6kAd+eiatkcpnbtbGrhKpOtoEZy +8KuslwkeixpzLDNISSbkeLpXz4xJI1ETMN/VG8ZZP1bjzlHziHHDu0JNZ6TnNzKr +XzCGMCohFfem8vnKNnKUneMQMvXd3rzUaAgvtf7Hc2LTBlf4fZzZF1EkwdSXhaMA +1lkfHiqOBxtgeDLxCHESZ2fqgVqsWX+t3qHQfivcPW6txtDyrFPRdJOGhiMGzT/t +e/9kkAtQRgpTb3skYdIOOUOV0WGQ60kJlFhAzIs= +-----END CERTIFICATE----- diff --git a/src/test/resources/test-kirk.jks b/src/test/resources/test-kirk.jks new file mode 100644 index 000000000..174dbda65 Binary files /dev/null and b/src/test/resources/test-kirk.jks differ