From b226cb57520f46404881c0849b4c1d0d78da65d2 Mon Sep 17 00:00:00 2001 From: shelld3v <59408894+shelld3v@users.noreply.github.com> Date: Mon, 2 May 2022 16:53:43 +0700 Subject: [PATCH 01/19] Updated some data source integrations --- README.md | 4 +- resources/scripts/api/arin.ads | 51 ------------------------- resources/scripts/api/greynoise.ads | 2 +- resources/scripts/api/ipdata.ads | 12 +++--- resources/scripts/api/n45ht.ads | 32 ---------------- resources/scripts/scrape/duckduckgo.ads | 2 +- resources/scripts/scrape/gists.ads | 2 +- 7 files changed, 11 insertions(+), 94 deletions(-) delete mode 100644 resources/scripts/api/arin.ads delete mode 100644 resources/scripts/api/n45ht.ads diff --git a/README.md b/README.md index 31a8c892f..b42020daf 100644 --- a/README.md +++ b/README.md @@ -24,10 +24,10 @@ The OWASP Amass Project performs network mapping of attack surfaces and external | Technique | Data Sources | |:-------------|:-------------| -| APIs | 360PassiveDNS, Ahrefs, AnubisDB, BinaryEdge, BufferOver, BuiltWith, C99, Chaos, CIRCL, Cloudflare, DNSDB, DNSRepo, Detectify, FOFA, FullHunt, GitHub, GitLab, Greynoise, HackerTarget, Hunter, IntelX, LeakIX, Maltiverse, Mnemonic, N45HT, PassiveTotal, PentestTools, Quake, Shodan, SonarSearch, Spamhaus, Spyse, Sublist3rAPI, ThreatBook, ThreatCrowd, ThreatMiner, Twitter, URLScan, VirusTotal, ZETAlytics, ZoomEye | +| APIs | 360PassiveDNS, Ahrefs, AnubisDB, BinaryEdge, BufferOver, BuiltWith, C99, Chaos, CIRCL, Cloudflare, DNSDB, DNSRepo, Detectify, FOFA, FullHunt, GitHub, GitLab, Greynoise, HackerTarget, Hunter, IntelX, LeakIX, Maltiverse, Mnemonic, PassiveTotal, PentestTools, Quake, Shodan, SonarSearch, Spamhaus, Spyse, Sublist3rAPI, ThreatBook, ThreatCrowd, ThreatMiner, Twitter, URLScan, VirusTotal, ZETAlytics, ZoomEye | | Certificates | Active pulls (optional), Censys, CertSpotter, Crtsh, Digitorus, FacebookCT, GoogleCT | | DNS | Brute forcing, Reverse DNS sweeping, NSEC zone walking, Zone transfers, FQDN alterations/permutations, FQDN Similarity-based Guessing | -| Routing | ARIN, BGPTools, BGPView, IPdata, IPinfo, NetworksDB, RADb, Robtex, ShadowServer, TeamCymru | +| Routing | BGPTools, BGPView, IPdata, IPinfo, NetworksDB, RADb, Robtex, ShadowServer, TeamCymru | | Scraping | AbuseIPDB, Ask, Baidu, Bing, DNSDumpster, DuckDuckGo, Gists, HackerOne, HyperStat, IPv4Info, PKey, RapidDNS, Riddler, Searchcode, Searx, SiteDossier, Yahoo | | Web Archives | ArchiveIt, Arquivo, CommonCrawl, HAW, UKWebArchive, Wayback | | WHOIS | AlienVault, AskDNS, DNSlytics, ONYPHE, SecurityTrails, SpyOnWeb, Umbrella, WhoisXMLAPI | diff --git a/resources/scripts/api/arin.ads b/resources/scripts/api/arin.ads deleted file mode 100644 index c76fe2c48..000000000 --- a/resources/scripts/api/arin.ads +++ /dev/null @@ -1,51 +0,0 @@ --- Copyright 2021 Jeff Foley. All rights reserved. --- Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. - -local json = require("json") - -name = "ARIN" -type = "api" - -function start() - set_rate_limit(1) -end - -function asn(ctx, addr, asn) - if addr == "" then - return - end - - local resp, err = request(ctx, {url=asn_url(addr)}) - if (err ~= nil and err ~= "") then - log(ctx, "asn request to service failed: " .. 
err) - return - end - - local j = json.decode(resp) - if (j == nil or j.cidr0_cidrs == nil or j.arin_originas0_originautnums == nil or - #(j.cidr0_cidrs) == 0 or #(j.arin_originas0_originautnums) == 0) then - return - end - local asn = j.arin_originas0_originautnums[1] - - if (j.cidr0_cidrs[1]['v4prefix'] == nil or j.cidr0_cidrs[1]['v4prefix'] == "") then - return - end - local cidr = j.cidr0_cidrs[1]['v4prefix'] .. "/" .. tostring(j.cidr0_cidrs[1]['length']) - - if j.entities[1]['vcardArray'] == nil then - return - end - local desc = j.name .. " - " .. j.entities[1]['vcardArray'][2][2][4] - - new_asn(ctx, { - ['addr']=addr, - ['asn']=asn, - ['desc']=desc, - ['prefix']=cidr, - }) -end - -function asn_url(addr) - return "https://rdap.arin.net/registry/ip/" .. addr -end diff --git a/resources/scripts/api/greynoise.ads b/resources/scripts/api/greynoise.ads index f6bd2d1b5..18423834e 100644 --- a/resources/scripts/api/greynoise.ads +++ b/resources/scripts/api/greynoise.ads @@ -33,5 +33,5 @@ function vertical(ctx, domain) end function build_url(domain) - return "https://greynoise-prod.herokuapp.com/enterprise/v2/experimental/gnql?size=1000&query=metadata.rdns:*." .. domain + return "https://www.greynoise.io/api/enterprise/v2/experimental/gnql?size=1000&query=metadata.rdns:*." .. domain end diff --git a/resources/scripts/api/ipdata.ads b/resources/scripts/api/ipdata.ads index 3e8933878..308025f11 100644 --- a/resources/scripts/api/ipdata.ads +++ b/resources/scripts/api/ipdata.ads @@ -44,17 +44,17 @@ function asn(ctx, addr, asn) return end - local d = json.decode(resp) - if (d == nil or d.asn == nil) then + local j = json.decode(resp) + if (j == nil or j.asn == nil) then return end new_asn(ctx, { ['addr']=addr, - ['asn']=tonumber(d.asn:gsub(3)), - desc=d.name, - prefix=d.route, - netblocks={d.route}, + ['asn']=tonumber(string.sub(j.asn, 3)), + desc=j.name, + prefix=j.route, + netblocks={j.route}, }) end diff --git a/resources/scripts/api/n45ht.ads b/resources/scripts/api/n45ht.ads deleted file mode 100644 index 60f43f423..000000000 --- a/resources/scripts/api/n45ht.ads +++ /dev/null @@ -1,32 +0,0 @@ --- Copyright 2021 Jeff Foley. All rights reserved. --- Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. - -local json = require("json") - -name = "N45HT" -type = "api" - -function start() - set_rate_limit(3) -end - -function vertical(ctx, domain) - local resp, err = request(ctx, {['url']=build_url(domain)}) - if (err ~= nil and err ~= "") then - log(ctx, "vertical request to service failed: " .. err) - return - end - - local d = json.decode(resp) - if (d == nil or #(d.subdomains) <= 1) then - return - end - - for _, sub in pairs(d.subdomains) do - new_name(ctx, sub) - end -end - -function build_url(domain) - return "https://api.n45ht.or.id/v1/subdomain-enumeration?domain=" .. 
domain -end diff --git a/resources/scripts/scrape/duckduckgo.ads b/resources/scripts/scrape/duckduckgo.ads index d91c549c3..2bd65a001 100644 --- a/resources/scripts/scrape/duckduckgo.ads +++ b/resources/scripts/scrape/duckduckgo.ads @@ -5,7 +5,7 @@ name = "DuckDuckGo" type = "scrape" function start() - set_rate_limit(1) + set_rate_limit(2) end function vertical(ctx, domain) diff --git a/resources/scripts/scrape/gists.ads b/resources/scripts/scrape/gists.ads index dc7fc908a..3ce8baf56 100644 --- a/resources/scripts/scrape/gists.ads +++ b/resources/scripts/scrape/gists.ads @@ -7,7 +7,7 @@ name = "Gists" type = "scrape" function start() - set_rate_limit(1) + set_rate_limit(4) end function vertical(ctx, domain) From dcf6b45e8897560bafc2775fda9ec033edf0e388 Mon Sep 17 00:00:00 2001 From: shelld3v <59408894+shelld3v@users.noreply.github.com> Date: Mon, 2 May 2022 17:49:08 +0700 Subject: [PATCH 02/19] Added DNSHistory as a datasource --- README.md | 2 +- resources/scripts/scrape/dnshistory.ads | 42 +++++++++++++++++++++++++ 2 files changed, 43 insertions(+), 1 deletion(-) create mode 100644 resources/scripts/scrape/dnshistory.ads diff --git a/README.md b/README.md index b42020daf..420c98994 100644 --- a/README.md +++ b/README.md @@ -28,7 +28,7 @@ The OWASP Amass Project performs network mapping of attack surfaces and external | Certificates | Active pulls (optional), Censys, CertSpotter, Crtsh, Digitorus, FacebookCT, GoogleCT | | DNS | Brute forcing, Reverse DNS sweeping, NSEC zone walking, Zone transfers, FQDN alterations/permutations, FQDN Similarity-based Guessing | | Routing | BGPTools, BGPView, IPdata, IPinfo, NetworksDB, RADb, Robtex, ShadowServer, TeamCymru | -| Scraping | AbuseIPDB, Ask, Baidu, Bing, DNSDumpster, DuckDuckGo, Gists, HackerOne, HyperStat, IPv4Info, PKey, RapidDNS, Riddler, Searchcode, Searx, SiteDossier, Yahoo | +| Scraping | AbuseIPDB, Ask, Baidu, Bing, DNSDumpster, DNSHistory, DuckDuckGo, Gists, HackerOne, HyperStat, IPv4Info, PKey, RapidDNS, Riddler, Searchcode, Searx, SiteDossier, Yahoo | | Web Archives | ArchiveIt, Arquivo, CommonCrawl, HAW, UKWebArchive, Wayback | | WHOIS | AlienVault, AskDNS, DNSlytics, ONYPHE, SecurityTrails, SpyOnWeb, Umbrella, WhoisXMLAPI | diff --git a/resources/scripts/scrape/dnshistory.ads b/resources/scripts/scrape/dnshistory.ads new file mode 100644 index 000000000..3883b0339 --- /dev/null +++ b/resources/scripts/scrape/dnshistory.ads @@ -0,0 +1,42 @@ +-- Copyright 2017-2021 Jeff Foley. All rights reserved. +-- Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. + +name = "DNSHistory" +type = "scrape" + +function start() + set_rate_limit(2) +end + +function vertical(ctx, domain) + local p = 1 + local pattern = "/dns\\-records/(.*)\">" + + while(true) do + local page, err = request(ctx, {['url']=build_url(domain, p)}) + if (err ~= nil and err ~= "") then + log(ctx, "vertical request to service failed: " .. err) + return + end + + local matches = submatch(page, pattern) + if (matches == nil or #matches == 0) then + return + end + + for _, match in pairs(matches) do + send_names(ctx, match[2]) + end + + local nxt = find(page, "next") + if (nxt == nil or #nxt == 0) then + return + end + + p = p + 1 + end +end + +function build_url(domain, pagenum) + return "https://dnshistory.org/subdomains/" .. pagenum .. "/" .. 
domain +end From d97b5c2909de5bf00463439f5f171897eee571e6 Mon Sep 17 00:00:00 2001 From: shelld3v <59408894+shelld3v@users.noreply.github.com> Date: Sat, 7 May 2022 21:05:45 +0700 Subject: [PATCH 03/19] Added Netlas as a datasource and removed IPv4Info --- README.md | 4 +- examples/config.ini | 5 ++ resources/scripts/api/netlas.ads | 61 ++++++++++++++++ resources/scripts/scrape/ipv4info.ads | 100 -------------------------- 4 files changed, 68 insertions(+), 102 deletions(-) create mode 100644 resources/scripts/api/netlas.ads delete mode 100644 resources/scripts/scrape/ipv4info.ads diff --git a/README.md b/README.md index 31a8c892f..7669e66fe 100644 --- a/README.md +++ b/README.md @@ -24,11 +24,11 @@ The OWASP Amass Project performs network mapping of attack surfaces and external | Technique | Data Sources | |:-------------|:-------------| -| APIs | 360PassiveDNS, Ahrefs, AnubisDB, BinaryEdge, BufferOver, BuiltWith, C99, Chaos, CIRCL, Cloudflare, DNSDB, DNSRepo, Detectify, FOFA, FullHunt, GitHub, GitLab, Greynoise, HackerTarget, Hunter, IntelX, LeakIX, Maltiverse, Mnemonic, N45HT, PassiveTotal, PentestTools, Quake, Shodan, SonarSearch, Spamhaus, Spyse, Sublist3rAPI, ThreatBook, ThreatCrowd, ThreatMiner, Twitter, URLScan, VirusTotal, ZETAlytics, ZoomEye | +| APIs | 360PassiveDNS, Ahrefs, AnubisDB, BinaryEdge, BufferOver, BuiltWith, C99, Chaos, CIRCL, Cloudflare, DNSDB, DNSRepo, Detectify, FOFA, FullHunt, GitHub, GitLab, Greynoise, HackerTarget, Hunter, IntelX, LeakIX, Maltiverse, Mnemonic, N45HT, Netlas, PassiveTotal, PentestTools, Quake, Shodan, SonarSearch, Spamhaus, Spyse, Sublist3rAPI, ThreatBook, ThreatCrowd, ThreatMiner, Twitter, URLScan, VirusTotal, ZETAlytics, ZoomEye | | Certificates | Active pulls (optional), Censys, CertSpotter, Crtsh, Digitorus, FacebookCT, GoogleCT | | DNS | Brute forcing, Reverse DNS sweeping, NSEC zone walking, Zone transfers, FQDN alterations/permutations, FQDN Similarity-based Guessing | | Routing | ARIN, BGPTools, BGPView, IPdata, IPinfo, NetworksDB, RADb, Robtex, ShadowServer, TeamCymru | -| Scraping | AbuseIPDB, Ask, Baidu, Bing, DNSDumpster, DuckDuckGo, Gists, HackerOne, HyperStat, IPv4Info, PKey, RapidDNS, Riddler, Searchcode, Searx, SiteDossier, Yahoo | +| Scraping | AbuseIPDB, Ask, Baidu, Bing, DNSDumpster, DuckDuckGo, Gists, HackerOne, HyperStat, PKey, RapidDNS, Riddler, Searchcode, Searx, SiteDossier, Yahoo | | Web Archives | ArchiveIt, Arquivo, CommonCrawl, HAW, UKWebArchive, Wayback | | WHOIS | AlienVault, AskDNS, DNSlytics, ONYPHE, SecurityTrails, SpyOnWeb, Umbrella, WhoisXMLAPI | diff --git a/examples/config.ini b/examples/config.ini index 56063d975..5f671db59 100644 --- a/examples/config.ini +++ b/examples/config.ini @@ -261,6 +261,11 @@ minimum_ttl = 1440 ; One day #[data_sources.LeakIX.Credentials] #apikey = +# https://netlas.io (Free) +#[data_sources.Netlas] +#[data_sources.Netlas.Credentials] +#apikey = + # https://networksdb.io (Paid/Free-trial) #[data_sources.NetworksDB] #[data_sources.NetworksDB.Credentials] diff --git a/resources/scripts/api/netlas.ads b/resources/scripts/api/netlas.ads new file mode 100644 index 000000000..96be7a46e --- /dev/null +++ b/resources/scripts/api/netlas.ads @@ -0,0 +1,61 @@ +-- Copyright 2022 Jeff Foley. All rights reserved. +-- Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. 
+ +local json = require("json") + +name = "Netlas" +type = "api" + +function start() + set_rate_limit(1) +end + +function check() + local c + local cfg = datasrc_config() + if cfg ~= nil then + c = cfg.credentials + end + + if (c ~= nil and c.key ~= nil and c.key ~= "") then + return true + end + return false +end + +function vertical(ctx, domain) + local c + local cfg = datasrc_config() + if cfg ~= nil then + c = cfg.credentials + end + + if (c == nil or c.key == nil or c.key == "") then + return + end + + local resp, err = request(ctx, { + url=build_url(domain), + headers={ + ['Accept']="application/json", + ['X-API-Key']=c.key, + }, + }) + if (err ~= nil and err ~= "") then + log(ctx, "vertical request to service failed: " .. err) + return + end + + local j = json.decode(resp) + if (j == nil or j.items == nil or #j.items == 0) then + return + end + + for _, item in pairs(j.items) do + new_name(ctx, item['data'].domain) + end +end + +function build_url(domain) + return "https://app.netlas.io/api/domains/?q=*." .. domain +end diff --git a/resources/scripts/scrape/ipv4info.ads b/resources/scripts/scrape/ipv4info.ads deleted file mode 100644 index c6da4c8f2..000000000 --- a/resources/scripts/scrape/ipv4info.ads +++ /dev/null @@ -1,100 +0,0 @@ --- Copyright 2021 Jeff Foley. All rights reserved. --- Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. - -name = "IPv4Info" -type = "scrape" - -function start() - set_rate_limit(2) -end - -function vertical(ctx, domain) - local path = get_path(ctx, domain) - if path == "" then - return - end - - local token = get_token(ctx, domain, path) - if token == "" then - return - end - - local u = "http://ipv4info.com/subdomains/" .. token .. "/" .. domain .. ".html" - local resp, err = request(ctx, {['url']=u}) - if (err ~= nil and err ~= "") then - log(ctx, "vertical request to service failed: " .. err) - return - end - - send_names(ctx, resp) - -- Attempt to scrape additional pages of subdomain names - local pagenum = 1 - while(true) do - local last = resp - resp = "" - - local page = "page" .. tostring(pagenum) - local key = domain .. page - - resp = next_page(ctx, domain, last, page) - if (resp == nil or resp == "") then - break - end - - send_names(ctx, resp) - pagenum = pagenum + 1 - end -end - -function get_path(ctx, domain) - local u = "http://ipv4info.com/search/" .. domain - local page, err = request(ctx, {['url']=u}) - if (err ~= nil and err ~= "") then - log(ctx, "get_path request to service failed: " .. err) - return "" - end - - local match = find(page, "/ip-address/(.*)/" .. domain) - if (match == nil or #match == 0) then - return "" - end - - return match[1] -end - -function get_token(ctx, domain, path) - local u = "http://ipv4info.com" .. path - local page, err = request(ctx, {['url']=u}) - if (err ~= nil and err ~= "") then - log(ctx, "get_token request to service failed: " .. err) - return "" - end - - local matches = submatch(page, "/dns/(.*?)/" .. domain) - if (matches == nil or #matches == 0) then - return "" - end - - local match = matches[1] - if (match == nil or #match < 2) then - return "" - end - - return match[2] -end - -function next_page(ctx, domain, resp, page) - local match = find(resp, "/subdomains/(.*)/" .. page .. "/" .. domain .. ".html") - if (match == nil or #match == 0) then - return "" - end - - local u = "http://ipv4info.com" .. match[1] - local page, err = request(ctx, {['url']=u}) - if (err ~= nil and err ~= "") then - log(ctx, "next_page request to service failed: " .. 
err) - return "" - end - - return page -end From 7168a98981560f6b5f56de6ea5b4d31c415a1ed3 Mon Sep 17 00:00:00 2001 From: shelld3v <59408894+shelld3v@users.noreply.github.com> Date: Sat, 7 May 2022 21:28:41 +0700 Subject: [PATCH 04/19] Small changes --- resources/scripts/scrape/dnshistory.ads | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/resources/scripts/scrape/dnshistory.ads b/resources/scripts/scrape/dnshistory.ads index 3883b0339..44c263564 100644 --- a/resources/scripts/scrape/dnshistory.ads +++ b/resources/scripts/scrape/dnshistory.ads @@ -1,4 +1,4 @@ --- Copyright 2017-2021 Jeff Foley. All rights reserved. +-- Copyright 2022 Jeff Foley. All rights reserved. -- Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. name = "DNSHistory" @@ -25,7 +25,7 @@ function vertical(ctx, domain) end for _, match in pairs(matches) do - send_names(ctx, match[2]) + new_name(ctx, match[2]) end local nxt = find(page, "next") From f0fe8c4fb3c8afab3bd30055372e1a7c0da12654 Mon Sep 17 00:00:00 2001 From: shelld3v <59408894+shelld3v@users.noreply.github.com> Date: Sun, 8 May 2022 15:32:46 +0700 Subject: [PATCH 05/19] Ported FOFA to lua script implementation --- datasrcs/fofa.go | 107 --------------------------------- datasrcs/sources.go | 1 - examples/config.ini | 2 +- resources/scripts/api/fofa.ads | 98 ++++++++++++++++++++++++++++++ 4 files changed, 99 insertions(+), 109 deletions(-) delete mode 100644 datasrcs/fofa.go create mode 100644 resources/scripts/api/fofa.ads diff --git a/datasrcs/fofa.go b/datasrcs/fofa.go deleted file mode 100644 index 9366d45ff..000000000 --- a/datasrcs/fofa.go +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright © by Jeff Foley 2021-2022. All rights reserved. -// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. -// SPDX-License-Identifier: Apache-2.0 - -package datasrcs - -import ( - "context" - "errors" - "fmt" - - "github.com/OWASP/Amass/v3/config" - "github.com/OWASP/Amass/v3/requests" - "github.com/OWASP/Amass/v3/systems" - "github.com/caffix/service" - "github.com/fofapro/fofa-go/fofa" -) - -// FOFA is the Service that handles access to the FOFA data source. -type FOFA struct { - service.BaseService - - SourceType string - sys systems.System - creds *config.Credentials -} - -// NewFOFA returns he object initialized, but not yet started. -func NewFOFA(sys systems.System) *FOFA { - f := &FOFA{ - SourceType: requests.SCRAPE, - sys: sys, - } - - go f.requests() - f.BaseService = *service.NewBaseService(f, "FOFA") - return f -} - -// Description implements the Service interface. -func (f *FOFA) Description() string { - return f.SourceType -} - -// OnStart implements the Service interface. 
-func (f *FOFA) OnStart() error {
-	f.creds = f.sys.Config().GetDataSourceConfig(f.String()).GetCredentials()
-
-	if f.creds == nil || f.creds.Username == "" || f.creds.Key == "" {
-		estr := fmt.Sprintf("%s: Email address or API key data was not provided", f.String())
-
-		f.sys.Config().Log.Print(estr)
-		return errors.New(estr)
-	}
-
-	f.SetRateLimit(1)
-	return nil
-}
-
-func (f *FOFA) requests() {
-	for {
-		select {
-		case <-f.Done():
-			return
-		case in := <-f.Input():
-			switch req := in.(type) {
-			case *requests.DNSRequest:
-				f.CheckRateLimit()
-				f.dnsRequest(context.TODO(), req)
-			}
-		}
-	}
-}
-
-func (f *FOFA) dnsRequest(ctx context.Context, req *requests.DNSRequest) {
-	if f.creds == nil || f.creds.Username == "" || f.creds.Key == "" {
-		return
-	}
-
-	if !f.sys.Config().IsDomainInScope(req.Domain) {
-		return
-	}
-
-	f.sys.Config().Log.Printf("Querying %s for %s subdomains", f.String(), req.Domain)
-
-	client := fofa.NewFofaClient([]byte(f.creds.Username), []byte(f.creds.Key))
-	if client == nil {
-		f.sys.Config().Log.Printf("%s: Failed to create FOFA client", f.String())
-		return
-	}
-
-	for i := 1; i <= 10; i++ {
-		results, err := client.QueryAsArray(uint(i), []byte(fmt.Sprintf("domain=\"%s\"", req.Domain)), []byte("domain"))
-		if err != nil {
-			f.sys.Config().Log.Printf("%s: %v", f.String(), err)
-			return
-		}
-		if len(results) == 0 {
-			break
-		}
-
-		for _, res := range results {
-			genNewNameEvent(ctx, f.sys, f, res.Domain)
-		}
-		f.CheckRateLimit()
-	}
-}
diff --git a/datasrcs/sources.go b/datasrcs/sources.go
index 04b48e7cd..1da6a95ca 100644
--- a/datasrcs/sources.go
+++ b/datasrcs/sources.go
@@ -22,7 +22,6 @@ func GetAllSources(sys systems.System) []service.Service {
 		NewAlienVault(sys),
 		NewCloudflare(sys),
 		NewDNSDB(sys),
-		NewFOFA(sys),
 		NewNetworksDB(sys),
 		NewRADb(sys),
 		NewTwitter(sys),
diff --git a/examples/config.ini b/examples/config.ini
index 56063d975..ec0db6361 100644
--- a/examples/config.ini
+++ b/examples/config.ini
@@ -210,7 +210,7 @@ minimum_ttl = 1440 ; One day
 #apikey =
 #secret =
 
-# https://fofa.so (Paid)
+# https://fofa.info (Paid)
 #[data_sources.FOFA]
 #ttl = 10080
 #[data_sources.FOFA.Credentials]
diff --git a/resources/scripts/api/fofa.ads b/resources/scripts/api/fofa.ads
new file mode 100644
index 000000000..c4919f493
--- /dev/null
+++ b/resources/scripts/api/fofa.ads
@@ -0,0 +1,98 @@
+-- Copyright 2022 Jeff Foley. All rights reserved.
+-- Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
+
+local json = require("json")
+local url = require("url")
+
+name = "FOFA"
+type = "api"
+
+function start()
+    set_rate_limit(1)
+end
+
+function check()
+    local c
+    local cfg = datasrc_config()
+    if cfg ~= nil then
+        c = cfg.credentials
+    end
+
+    if (c ~= nil and c.username ~= nil and
+        c.key ~= nil and c.username ~= "" and c.key ~= "") then
+        return true
+    end
+    return false
+end
+
+function vertical(ctx, domain)
+    local c
+    local cfg = datasrc_config()
+    if cfg ~= nil then
+        c = cfg.credentials
+    end
+
+    if (c == nil or c.username == nil or
+        c.username == "" or c.key == nil or c.key == "") then
+        return
+    end
+
+    local p = 1
+    while(true) do
+        local resp, err = request(ctx, {
+            ['url']=build_url(domain, c.username, c.key, p)
+        })
+        if (err ~= nil and err ~= "") then
+            log(ctx, "vertical request to service failed: " .. err)
+            return
+        end
+
+        local j = json.decode(resp)
+        if (j == nil or j.error == true or j.size == 0) then
+            if (j ~= nil and j.errmsg ~= nil and j.errmsg ~= "") then
+                log(ctx, "vertical request to service failed: " .. j.errmsg)
+            end
+
+            return
+        end
+
+        for _, result in pairs(j.results) do
+            send_names(ctx, result)
+        end
+
+        if j.size < 10000 then
+            return
+        end
+        p = p + 1
+    end
+end
+
+function build_url(domain, username, key, pagenum)
+    local query = base64_encode("domain=\"" .. domain .. "\"")
+    local params = {
+        ['full']="true",
+        ['fields']="host",
+        ['size']="10000",
+        ['page']=pagenum,
+        ['email']=username,
+        ['key']=key,
+        ['qbase64']=query,
+    }
+
+    return "https://fofa.info/api/v1/search/all?" .. url.build_query_string(params)
+end
+
+function base64_encode(data)
+    local b = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
+
+    return ((data:gsub('.', function(x)
+        local r,b='',x:byte()
+        for i=8,1,-1 do r=r..(b%2^i-b%2^(i-1)>0 and '1' or '0') end
+        return r;
+    end)..'0000'):gsub('%d%d%d?%d?%d?%d?', function(x)
+        if (#x < 6) then return '' end
+        local c=0
+        for i=1,6 do c=c+(x:sub(i,i)=='1' and 2^(6-i) or 0) end
+        return b:sub(c+1,c+1)
+    end)..({ '', '==', '=' })[#data%3+1])
+end
From c460ae6c0409809072c34cf868514e5fc57abd1a Mon Sep 17 00:00:00 2001
From: Pham Sy Minh <59408894+shelld3v@users.noreply.github.com>
Date: Sun, 8 May 2022 16:42:08 +0700
Subject: [PATCH 06/19] Updated SearX instances list

---
 resources/scripts/scrape/searx.ads | 17 ++++++++++-------
 1 file changed, 10 insertions(+), 7 deletions(-)

diff --git a/resources/scripts/scrape/searx.ads b/resources/scripts/scrape/searx.ads
index e53579fc5..161376f09 100644
--- a/resources/scripts/scrape/searx.ads
+++ b/resources/scripts/scrape/searx.ads
@@ -7,24 +7,27 @@ name = "Searx"
 type = "scrape"
 
 function start()
-    set_rate_limit(4)
+    set_rate_limit(2)
     math.randomseed(os.time())
 end
 
 function vertical(ctx, domain)
-    -- Qualified best Searx instances
+    -- Qualified best SearX/SearXNG instances
     local instances = {
         "https://anon.sx",
+        "https://etsi.me",
+        "https://northboot.xyz",
+        "https://procurx.pt",
+        "https://searx.be",
         "https://searx.info",
+        "https://searx.ninja",
         "https://searx.ru",
-        "https://searx.run",
-        "https://searx.sk",
-        "https://xeek.com",
+        "https://swag.pw",
     }
     -- Randomly choose one instance for scraping
-    local host = instances[math.random(1, 6)] .. "/search"
+    local host = instances[math.random(1, 9)] .. "/search"
 
-    for i=1,15 do
+    for i=1,10 do
         local query = "site:" .. domain .. 
" -www" local params = { ['q']=query, From 16bf2888e79ff4dc4971d8ab71bba98e3a5bf0b8 Mon Sep 17 00:00:00 2001 From: shelld3v <59408894+shelld3v@users.noreply.github.com> Date: Sun, 8 May 2022 23:25:12 +0700 Subject: [PATCH 07/19] Added Yandex datasource and adjusted rate limit of Gists --- README.md | 2 +- examples/config.ini | 13 +++++- resources/scripts/scrape/gists.ads | 2 +- resources/scripts/scrape/yandex.ads | 72 +++++++++++++++++++++++++++++ 4 files changed, 85 insertions(+), 4 deletions(-) create mode 100644 resources/scripts/scrape/yandex.ads diff --git a/README.md b/README.md index 420c98994..3855bd227 100644 --- a/README.md +++ b/README.md @@ -28,7 +28,7 @@ The OWASP Amass Project performs network mapping of attack surfaces and external | Certificates | Active pulls (optional), Censys, CertSpotter, Crtsh, Digitorus, FacebookCT, GoogleCT | | DNS | Brute forcing, Reverse DNS sweeping, NSEC zone walking, Zone transfers, FQDN alterations/permutations, FQDN Similarity-based Guessing | | Routing | BGPTools, BGPView, IPdata, IPinfo, NetworksDB, RADb, Robtex, ShadowServer, TeamCymru | -| Scraping | AbuseIPDB, Ask, Baidu, Bing, DNSDumpster, DNSHistory, DuckDuckGo, Gists, HackerOne, HyperStat, IPv4Info, PKey, RapidDNS, Riddler, Searchcode, Searx, SiteDossier, Yahoo | +| Scraping | AbuseIPDB, Ask, Baidu, Bing, DNSDumpster, DNSHistory, DuckDuckGo, Gists, HackerOne, HyperStat, IPv4Info, PKey, RapidDNS, Riddler, Searchcode, Searx, SiteDossier, Yahoo, Yandex | | Web Archives | ArchiveIt, Arquivo, CommonCrawl, HAW, UKWebArchive, Wayback | | WHOIS | AlienVault, AskDNS, DNSlytics, ONYPHE, SecurityTrails, SpyOnWeb, Umbrella, WhoisXMLAPI | diff --git a/examples/config.ini b/examples/config.ini index 56063d975..005f1bc3c 100644 --- a/examples/config.ini +++ b/examples/config.ini @@ -133,7 +133,7 @@ minimum_ttl = 1440 ; One day #[data_sources.BinaryEdge.Credentials] #apikey = -# https://tls.bufferover.run/dns?q=.example.com (Paid/Free) +# https://tls.bufferover.run (Freemium) #[data_sources.BufferOver] #[data_sources.BufferOver.Credentials] #apikey = @@ -256,7 +256,7 @@ minimum_ttl = 1440 ; One day #[data_sources.IPinfo.Credentials] #apikey = -# https://leakix.net/ (Free) +# https://leakix.net (Free) #[data_sources.LeakIX] #[data_sources.LeakIX.Credentials] #apikey = @@ -360,8 +360,17 @@ minimum_ttl = 1440 ; One day #[data_sources.ZETAlytics.Credentials] #apikey = +# https://zoomeye.org (Free) #[data_sources.ZoomEye] #ttl = 1440 #[data_sources.ZoomEye.Credentials] #username = #password = + +# https://yandex.com/dev/xml/ (Free) +# Restrictions and requirements: https://yandex.com/dev/xml/doc/dg/concepts/restrictions-new.html +#[data_sources.Yandex] +#ttl = 1440 +#[data_sources.Yandex.Credentials] +#username = +#apikey = diff --git a/resources/scripts/scrape/gists.ads b/resources/scripts/scrape/gists.ads index 3ce8baf56..6bb276841 100644 --- a/resources/scripts/scrape/gists.ads +++ b/resources/scripts/scrape/gists.ads @@ -7,7 +7,7 @@ name = "Gists" type = "scrape" function start() - set_rate_limit(4) + set_rate_limit(2) end function vertical(ctx, domain) diff --git a/resources/scripts/scrape/yandex.ads b/resources/scripts/scrape/yandex.ads new file mode 100644 index 000000000..179d0cce3 --- /dev/null +++ b/resources/scripts/scrape/yandex.ads @@ -0,0 +1,72 @@ +-- Copyright 2022 Jeff Foley. All rights reserved. +-- Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. 
+ +local url = require("url") + +name = "Yandex" +type = "scrape" + +function start() + set_rate_limit(2) +end + +function check() + local c + local cfg = datasrc_config() + if cfg ~= nil then + c = cfg.credentials + end + + if (c ~= nil and c.username ~= nil and + c.key ~= nil and c.username ~= "" and c.key ~= "") then + return true + end + return false +end + +function vertical(ctx, domain) + local c + local cfg = datasrc_config() + if cfg ~= nil then + c = cfg.credentials + end + + if (c == nil or c.username == nil or + c.username == "" or c.key == nil or c.key == "") then + return + end + + local tlds = {"com", "com.tr", "ru"} + + for _, tld in pairs(tlds) do + local correct_tld = false + for i=1,10 do + local found = scrape(ctx, { + url=build_url(c.username, c.key, domain, tld, i), + }) + + if not found then + break + elseif i == 1 then + correct_tld = true + end + end + + if correct_tld then + break + end + end +end + +function build_url(username, key, domain, tld, pagenum) + local query = "site:" .. domain .. " -www" + local params = { + ['maxpassages']=1, + ['user']=username, + ['key']=key, + ['query']=query, + ['page']=pagenum, + } + + return "https://yandex." .. tld .. "/search/xml?" .. url.build_query_string(params) +end From 1d783967638adb7f8b9e112c4592eb515d9ebfc4 Mon Sep 17 00:00:00 2001 From: Pham Sy Minh <59408894+shelld3v@users.noreply.github.com> Date: Mon, 9 May 2022 00:05:28 +0700 Subject: [PATCH 08/19] Fixed Yandex datasource --- resources/scripts/scrape/yandex.ads | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/resources/scripts/scrape/yandex.ads b/resources/scripts/scrape/yandex.ads index 179d0cce3..38686f114 100644 --- a/resources/scripts/scrape/yandex.ads +++ b/resources/scripts/scrape/yandex.ads @@ -42,7 +42,7 @@ function vertical(ctx, domain) local correct_tld = false for i=1,10 do local found = scrape(ctx, { - url=build_url(c.username, c.key, domain, tld, i), + ['url']=build_url(c.username, c.key, domain, tld, i), }) if not found then From d4861a50cd0b30aac6bdfebf525d2b4c55c370d9 Mon Sep 17 00:00:00 2001 From: Pham Sy Minh <59408894+shelld3v@users.noreply.github.com> Date: Mon, 9 May 2022 11:11:26 +0700 Subject: [PATCH 09/19] Added support for commercial BufferOver API key --- resources/scripts/api/bufferover.ads | 27 +++++++++++++++++++++++---- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/resources/scripts/api/bufferover.ads b/resources/scripts/api/bufferover.ads index f4852fef1..60a033646 100644 --- a/resources/scripts/api/bufferover.ads +++ b/resources/scripts/api/bufferover.ads @@ -16,15 +16,34 @@ function vertical(ctx, domain) end if (c ~= nil and c.key ~= nil and c.key ~= "") then - scrape(ctx, { - url=build_url(domain, "tls"), - headers={['x-api-key']=c["key"]}, - }) + local ok = commercial_api_query(ctx, domain, c.key) + if not ok then + scrape(ctx, { + url=build_url(domain, "tls"), + headers={['x-api-key']=c.key}, + }) + end end scrape(ctx, {url=build_url(domain, "dns")}) end +function commercial_api_query(ctx, domain, key) + local resp, err = request(ctx, { + url="https://bufferover-run-tls.p.rapidapi.com/ipv4/dns?q=." .. domain, + headers={ + ['x-rapidapi-host']="bufferover-run-tls.p.rapidapi.com", + ['x-rapidapi-key']=key, + }, + }) + if (err ~= nil and err ~= "") then + return + end + + send_names(ctx, resp) + return true +end + function build_url(domain, sub) return "https://" .. sub .. ".bufferover.run/dns?q=." .. 
domain end From 0d76f66422b1cb59e9a2d47add7d227dcfd3190a Mon Sep 17 00:00:00 2001 From: Pham Sy Minh <59408894+shelld3v@users.noreply.github.com> Date: Mon, 9 May 2022 12:03:29 +0700 Subject: [PATCH 10/19] Add check for Censys --- resources/scripts/cert/censys.ads | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/resources/scripts/cert/censys.ads b/resources/scripts/cert/censys.ads index 797961b71..5447032c2 100644 --- a/resources/scripts/cert/censys.ads +++ b/resources/scripts/cert/censys.ads @@ -11,6 +11,20 @@ function start() set_rate_limit(3) end +function check() + local c + local cfg = datasrc_config() + if cfg ~= nil then + c = cfg.credentials + end + + if (c ~= nil and c.key ~= nil and + c.key ~= "" and c.secret ~= nil and c.secret ~= "") then + return true + end + return false +end + function vertical(ctx, domain) local c local cfg = datasrc_config() From a1e6763c7037fddf13bcd52e0c70cc119f3fe08f Mon Sep 17 00:00:00 2001 From: Pham Sy Minh <59408894+shelld3v@users.noreply.github.com> Date: Fri, 20 May 2022 22:31:09 +0700 Subject: [PATCH 11/19] Avoid associated domain being duplicated of input domain --- datasrcs/scripting/new.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datasrcs/scripting/new.go b/datasrcs/scripting/new.go index e454af987..8bcc2d825 100644 --- a/datasrcs/scripting/new.go +++ b/datasrcs/scripting/new.go @@ -162,7 +162,7 @@ func (s *Script) newASN(L *lua.LState) int { // Wrapper so that scripts can send discovered associated domains to Amass. func (s *Script) associated(L *lua.LState) int { if ctx, err := extractContext(L.CheckUserData(1)); err == nil && !contextExpired(ctx) { - if domain, assoc := L.CheckString(2), L.CheckString(3); err == nil && domain != "" && assoc != "" { + if domain, assoc := L.CheckString(2), L.CheckString(3); err == nil && domain != "" && assoc != "" && domain != assoc { select { case <-ctx.Done(): case <-s.Done(): From 9d1d9b3dd651fb190d8c45f47908d9fbcd2e4545 Mon Sep 17 00:00:00 2001 From: shelld3v <59408894+shelld3v@users.noreply.github.com> Date: Sun, 22 May 2022 15:13:17 +0700 Subject: [PATCH 12/19] Improved GitLab data source and ported Searchcode source to API usage --- README.md | 4 +-- examples/config.ini | 9 +++-- resources/scripts/api/gitlab.ads | 36 ++++++++++++++++---- resources/scripts/api/searchcode.ads | 22 ++++++++++++ resources/scripts/scrape/searchcode.ads | 45 ------------------------- 5 files changed, 57 insertions(+), 59 deletions(-) create mode 100644 resources/scripts/api/searchcode.ads delete mode 100644 resources/scripts/scrape/searchcode.ads diff --git a/README.md b/README.md index 31a8c892f..e91f0cae0 100644 --- a/README.md +++ b/README.md @@ -24,11 +24,11 @@ The OWASP Amass Project performs network mapping of attack surfaces and external | Technique | Data Sources | |:-------------|:-------------| -| APIs | 360PassiveDNS, Ahrefs, AnubisDB, BinaryEdge, BufferOver, BuiltWith, C99, Chaos, CIRCL, Cloudflare, DNSDB, DNSRepo, Detectify, FOFA, FullHunt, GitHub, GitLab, Greynoise, HackerTarget, Hunter, IntelX, LeakIX, Maltiverse, Mnemonic, N45HT, PassiveTotal, PentestTools, Quake, Shodan, SonarSearch, Spamhaus, Spyse, Sublist3rAPI, ThreatBook, ThreatCrowd, ThreatMiner, Twitter, URLScan, VirusTotal, ZETAlytics, ZoomEye | +| APIs | 360PassiveDNS, Ahrefs, AnubisDB, BinaryEdge, BufferOver, BuiltWith, C99, Chaos, CIRCL, Cloudflare, DNSDB, DNSRepo, Detectify, FOFA, FullHunt, GitHub, GitLab, Greynoise, HackerTarget, Hunter, IntelX, LeakIX, Maltiverse, Mnemonic, N45HT, PassiveTotal, 
PentestTools, Quake, Searchcode, Shodan, SonarSearch, Spamhaus, Spyse, Sublist3rAPI, ThreatBook, ThreatCrowd, ThreatMiner, Twitter, URLScan, VirusTotal, ZETAlytics, ZoomEye |
 | Certificates | Active pulls (optional), Censys, CertSpotter, Crtsh, Digitorus, FacebookCT, GoogleCT |
 | DNS | Brute forcing, Reverse DNS sweeping, NSEC zone walking, Zone transfers, FQDN alterations/permutations, FQDN Similarity-based Guessing |
 | Routing | ARIN, BGPTools, BGPView, IPdata, IPinfo, NetworksDB, RADb, Robtex, ShadowServer, TeamCymru |
-| Scraping | AbuseIPDB, Ask, Baidu, Bing, DNSDumpster, DuckDuckGo, Gists, HackerOne, HyperStat, IPv4Info, PKey, RapidDNS, Riddler, Searchcode, Searx, SiteDossier, Yahoo |
+| Scraping | AbuseIPDB, Ask, Baidu, Bing, DNSDumpster, DuckDuckGo, Gists, HackerOne, HyperStat, IPv4Info, PKey, RapidDNS, Riddler, Searx, SiteDossier, Yahoo |
 | Web Archives | ArchiveIt, Arquivo, CommonCrawl, HAW, UKWebArchive, Wayback |
 | WHOIS | AlienVault, AskDNS, DNSlytics, ONYPHE, SecurityTrails, SpyOnWeb, Umbrella, WhoisXMLAPI |
diff --git a/examples/config.ini b/examples/config.ini
index 56063d975..a4a55643c 100644
--- a/examples/config.ini
+++ b/examples/config.ini
@@ -223,15 +223,14 @@ minimum_ttl = 1440 ; One day
 #[data_sources.GitHub.accountname]
 #apikey =
 
-# https://gitlab.com (Freemium)
+# https://gitlab.com (Free)
+# The GitLab apikey is a personal access token with at least the read_repository or api scope
 #[data_sources.GitLab]
-#[data_sources.GitLab.free]
-#apikey =
-#[data_sources.GitLab.premium]
+#ttl = 4320
+#[data_sources.GitLab.accountname]
 #apikey =
 
 # https://hackertarget.com (Paid/Free)
-# HackerTarget can be used without an API key, but the key allows better results
 #[data_sources.HackerTarget]
 #ttl = 1440
 #[data_sources.HackerTarget.Credentials]
diff --git a/resources/scripts/api/gitlab.ads b/resources/scripts/api/gitlab.ads
index 29ee61f4b..367bae2a4 100644
--- a/resources/scripts/api/gitlab.ads
+++ b/resources/scripts/api/gitlab.ads
@@ -1,6 +1,8 @@
 -- Copyright 2021 Jeff Foley. All rights reserved.
 -- Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
 
+local json = require("json")
+
 name = "GitLab"
 type = "api"
 
@@ -32,15 +34,35 @@ function vertical(ctx, domain)
         return
     end
 
-    local scopes = {"issues", "blobs", "notes"}
-    for _, s in pairs(scopes) do
-        scrape(ctx, {
-            url=build_url(domain, s),
-            headers={['PRIVATE-TOKEN']=c.key},
+    local resp, err = request(ctx, {
+        ['url']=search_url(domain),
+        ['headers']={['PRIVATE-TOKEN']=c.key},
+    })
+    if (err ~= nil and err ~= "") then
+        log(ctx, "vertical request to service failed: " .. err)
+        return
+    end
+
+    local j = json.decode(resp)
+    if (j == nil or #j == 0) then
+        return
+    end
+
+    for _, item in pairs(j) do
+        local ok = scrape(ctx, {
+            ['url']=get_file_url(item.project_id, item.path, item.ref),
+            ['headers']={['PRIVATE-TOKEN']=c.key},
         })
+        if not ok then
+            send_names(ctx, item.data)
+        end
     end
 end
 
-function build_url(domain, scope)
-    return "https://gitlab.com/api/v4/search?scope=" .. scope .. "&search=" .. domain:gsub("%.", "[.]")
+function get_file_url(id, path, ref)
+    return "https://gitlab.com/api/v4/projects/" .. id .. "/repository/files/" .. path:gsub("/", "%%2f") .. "/raw?ref=" .. ref
+end
+
+function search_url(domain)
+    return "https://gitlab.com/api/v4/search?scope=blobs&search=" .. 
domain end diff --git a/resources/scripts/api/searchcode.ads b/resources/scripts/api/searchcode.ads new file mode 100644 index 000000000..910c42719 --- /dev/null +++ b/resources/scripts/api/searchcode.ads @@ -0,0 +1,22 @@ +-- Copyright 2021 Jeff Foley. All rights reserved. +-- Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. + +name = "Searchcode" +type = "api" + +function start() + set_rate_limit(2) +end + +function vertical(ctx, domain) + for i=0,49 do + local ok = scrape(ctx, {['url']=build_url(domain, i)}) + if not ok then + return + end + end +end + +function build_url(domain, pagenum) + return "https://searchcode.com/api/codesearch_I/?per_page=100&q=." .. domain .. "&p=" .. pagenum +end diff --git a/resources/scripts/scrape/searchcode.ads b/resources/scripts/scrape/searchcode.ads deleted file mode 100644 index a50bc035c..000000000 --- a/resources/scripts/scrape/searchcode.ads +++ /dev/null @@ -1,45 +0,0 @@ --- Copyright 2021 Jeff Foley. All rights reserved. --- Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. - -name = "Searchcode" -type = "scrape" - -function start() - set_rate_limit(2) -end - -function vertical(ctx, domain) - for i=0,20 do - local page, err = request(ctx, {['url']=build_url(domain, i)}) - if (err ~= nil and err ~= "") then - log(ctx, "vertical request to service failed: " .. err) - break - end - - local found = find_names(ctx, page:gsub("", ""), domain) - if not found then - break - end - end -end - -function build_url(domain, pagenum) - return "https://searchcode.com/?q=." .. domain .. "&p=" .. pagenum -end - -function find_names(ctx, content, domain) - local names = find(content, subdomain_regex) - if (names == nil or #names == 0) then - return false - end - - local found = false - for _, name in pairs(names) do - if in_scope(ctx, name) then - found = true - new_name(ctx, name) - end - end - - return found -end From 739e7de596f80b954d48529d530e516d2e2f1d75 Mon Sep 17 00:00:00 2001 From: Pham Sy Minh <59408894+shelld3v@users.noreply.github.com> Date: Wed, 25 May 2022 23:38:39 +0700 Subject: [PATCH 13/19] Removed ineffective words for Alterations --- resources/alterations.txt | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/resources/alterations.txt b/resources/alterations.txt index 553c2e6ce..918dd1966 100644 --- a/resources/alterations.txt +++ b/resources/alterations.txt @@ -37,7 +37,6 @@ acc account accounts admin -admin1 administrator akali akamai @@ -46,8 +45,6 @@ alt america analytics api -api-docs -api1 apollo app april @@ -73,7 +70,6 @@ ci client cloudfront cms -cms1 cn com confluence @@ -85,7 +81,6 @@ db dec demo dev -dev1 developer devops docker @@ -119,7 +114,6 @@ g games germany gh -ghcpi git github global @@ -139,7 +133,6 @@ jinx july june k -kor korea kr l @@ -148,7 +141,6 @@ las latin latinamerica lax -lax1 lb loadbalancer login @@ -179,7 +171,6 @@ org origin p page -pantheon pass pay payment @@ -230,7 +221,6 @@ sso staff stag stage -stage1 staging static stg @@ -240,10 +230,8 @@ system t team test -test1 testbed testing -testing1 tomcat tpe tr @@ -264,7 +252,6 @@ vpn w w3 web -web1 webapp westeurope www From 36f3451af553220b1da2758269a2476eb0000a2d Mon Sep 17 00:00:00 2001 From: Pham Sy Minh <59408894+shelld3v@users.noreply.github.com> Date: Mon, 30 May 2022 15:06:16 +0700 Subject: [PATCH 14/19] Updated User Guide --- doc/user_guide.md | 114 +++++++++++++++++++++++++++------------------- 1 file changed, 66 insertions(+), 48 deletions(-) diff 
--git a/doc/user_guide.md b/doc/user_guide.md index dca1f5e56..e5fe6aaf7 100644 --- a/doc/user_guide.md +++ b/doc/user_guide.md @@ -53,7 +53,17 @@ The amass tool has several subcommands shown below for handling your Internet ex | track | Compare results of enumerations against common target organizations | | db | Manage the graph databases storing the enumeration results | -Each subcommand has its own arguments that are shown in the following sections. +All subcommands have some default global arguments that can be seen below. + +| Flag | Description | Example | +|------|-------------|---------| +| -h/-help | Show the program usage message | amass subcommand -h | +| -config | Path to the INI configuration file | amass subcommand -config config.ini | +| -dir | Path to the directory containing the graph database | amass subcommand -dir PATH -d example.com | +| -nocolor | Disable colorized output | amass subcommand -nocolor -d example.com | +| -silent | Disable all output during execution | amass subcommand -silent -json out.json -d example.com | + +Each subcommand's own arguments are shown in the following sections. ### The 'intel' Subcommand @@ -65,11 +75,9 @@ The intel subcommand can help you discover additional root domain names associat | -addr | IPs and ranges (192.168.1.1-254) separated by commas | amass intel -addr 192.168.2.1-64 | | -asn | ASNs separated by commas (can be used multiple times) | amass intel -asn 13374,14618 | | -cidr | CIDRs separated by commas (can be used multiple times) | amass intel -cidr 104.154.0.0/15 | -| -config | Path to the INI configuration file | amass intel -config config.ini | | -d | Domain names separated by commas (can be used multiple times) | amass intel -whois -d example.com | | -demo | Censor output to make it suitable for demonstrations | amass intel -demo -whois -d example.com | | -df | Path to a file providing root domain names | amass intel -whois -df domains.txt | -| -dir | Path to the directory containing the graph database | amass intel -dir PATH -cidr 104.154.0.0/15 | | -ef | Path to a file providing data sources to exclude | amass intel -whois -ef exclude.txt -d example.com | | -exclude | Data source names separated by commas to be excluded | amass intel -whois -exclude crtsh -d example.com | | -if | Path to a file providing data sources to include | amass intel -whois -if include.txt -d example.com | @@ -87,6 +95,7 @@ The intel subcommand can help you discover additional root domain names associat | -rf | Path to a file providing preferred DNS resolvers | amass intel -rf data/resolvers.txt -whois -d example.com | | -src | Print data sources for the discovered names | amass intel -src -whois -d example.com | | -timeout | Number of minutes to execute the enumeration | amass intel -timeout 30 -d example.com | +| -v | Output status / debug / troubleshooting info | amass intel -v -whois -d example.com | | -whois | All discovered domains are run through reverse whois | amass intel -whois -d example.com | ### The 'enum' Subcommand @@ -96,18 +105,20 @@ This subcommand will perform DNS enumeration and network mapping while populatin | Flag | Description | Example | |------|-------------|---------| | -active | Enable active recon methods | amass enum -active -d example.com -p 80,443,8080 | +| -alts | Enable generation of altered names | amass enum -alts -d example.com | | -aw | Path to a different wordlist file for alterations | amass enum -aw PATH -d example.com | +| -awm | "hashcat-style" wordlist masks for name alterations | amass enum -awm 
dev?d -d example.com | | -bl | Blacklist of subdomain names that will not be investigated | amass enum -bl blah.example.com -d example.com | | -blf | Path to a file providing blacklisted subdomains | amass enum -blf data/blacklist.txt -d example.com | | -brute | Perform brute force subdomain enumeration | amass enum -brute -d example.com | -| -config | Path to the INI configuration file | amass enum -config config.ini | | -d | Domain names separated by commas (can be used multiple times) | amass enum -d example.com | | -demo | Censor output to make it suitable for demonstrations | amass enum -demo -d example.com | | -df | Path to a file providing root domain names | amass enum -df domains.txt | -| -dir | Path to the directory containing the graph database | amass enum -dir PATH -d example.com | +| -dns-qps | Maximum number of DNS queries per second across all resolvers | amass enum -dns-qps 200 -d example.com | | -ef | Path to a file providing data sources to exclude | amass enum -ef exclude.txt -d example.com | | -exclude | Data source names separated by commas to be excluded | amass enum -exclude crtsh -d example.com | | -if | Path to a file providing data sources to include | amass enum -if include.txt -d example.com | +| -iface | Provide the network interface to send traffic through | amass enum -iface en0 -d example.com | | -include | Data source names separated by commas to be included | amass enum -include crtsh -d example.com | | -ip | Show the IP addresses for discovered names | amass enum -ip -d example.com | | -ipv4 | Show the IPv4 addresses for discovered names | amass enum -ipv4 -d example.com | @@ -115,26 +126,27 @@ This subcommand will perform DNS enumeration and network mapping while populatin | -json | Path to the JSON output file | amass enum -json out.json -d example.com | | -list | Print the names of all available data sources | amass enum -list | | -log | Path to the log file where errors will be written | amass enum -log amass.log -d example.com | +| -max-depth | Maximum number of subdomain labels for brute forcing | amass enum -brute -max-depth 3 -d example.com | | -max-dns-queries | Deprecated flag to be replaced by dns-qps in version 4.0 | amass enum -max-dns-queries 200 -d example.com | -| -dns-qps | Maximum number of DNS queries per second across all resolvers | amass enum -dns-qps 200 -d example.com | -| -rqps | Maximum number of DNS queries per second for each untrusted resolver | amass enum -rqps 10 -d example.com | -| -trqps | Maximum number of DNS queries per second for each trusted resolver | amass enum -trqps 20 -d example.com | | -min-for-recursive | Subdomain labels seen before recursive brute forcing (Default: 1) | amass enum -brute -min-for-recursive 3 -d example.com | -| -max-depth | Maximum number of subdomain labels for brute forcing | amass enum -brute -max-depth 3 -d example.com | | -nf | Path to a file providing already known subdomain names (from other tools/sources) | amass enum -nf names.txt -d example.com | -| -noalts | Disable generation of altered names | amass enum -noalts -d example.com | | -norecursive | Turn off recursive brute forcing | amass enum -brute -norecursive -d example.com | | -o | Path to the text output file | amass enum -o out.txt -d example.com | | -oA | Path prefix used for naming all output files | amass enum -oA amass_scan -d example.com | -| -passive | A purely passive mode of execution | amass enum --passive -d example.com | | -p | Ports separated by commas (default: 443) | amass enum -d example.com -p 443,8080 | +| 
-passive | A purely passive mode of execution | amass enum --passive -d example.com | | -r | IP addresses of untrusted DNS resolvers (can be used multiple times) | amass enum -r 8.8.8.8,1.1.1.1 -d example.com | -| -tr | IP addresses of trusted DNS resolvers (can be used multiple times) | amass enum -tr 8.8.8.8,1.1.1.1 -d example.com | | -rf | Path to a file providing untrusted DNS resolvers | amass enum -rf data/resolvers.txt -d example.com | -| -trf | Path to a file providing trusted DNS resolvers | amass enum -trf data/trusted.txt -d example.com | +| -rqps | Maximum number of DNS queries per second for each untrusted resolver | amass enum -rqps 10 -d example.com | +| -scripts | Path to a directory containing ADS scripts | amass enum -scripts PATH -d example.com | | -src | Print data sources for the discovered names | amass enum -src -d example.com | | -timeout | Number of minutes to execute the enumeration | amass enum -timeout 30 -d example.com | -| -w | Path to a different wordlist file | amass enum -brute -w wordlist.txt -d example.com | +| -tr | IP addresses of trusted DNS resolvers (can be used multiple times) | amass enum -tr 8.8.8.8,1.1.1.1 -d example.com | +| -trf | Path to a file providing trusted DNS resolvers | amass enum -trf data/trusted.txt -d example.com | +| -trqps | Maximum number of DNS queries per second for each trusted resolver | amass enum -trqps 20 -d example.com | +| -v | Output status / debug / troubleshooting info | amass enum -v -d example.com | +| -w | Path to a different wordlist file for brute forcing | amass enum -brute -w wordlist.txt -d example.com | +| -wm | "hashcat-style" wordlist masks for DNS brute forcing | amass enum -brute -wm ?l?l -d example.com | ### The 'viz' Subcommand @@ -146,19 +158,17 @@ Switches for outputting the DNS and infrastructure findings as a network graph: | Flag | Description | Example | |------|-------------|---------| -| -config | Path to the INI configuration file | amass viz -config config.ini -d3 | | -d | Domain names separated by commas (can be used multiple times) | amass viz -d3 -d example.com | | -d3 | Output a D3.js v4 force simulation HTML file | amass viz -d3 -d example.com | | -df | Path to a file providing root domain names | amass viz -d3 -df domains.txt | -| -dir | Path to the directory containing the graph database | amass viz -d3 -dir PATH -d example.com | +| -dot | Generate the DOT output file | amass viz -dot -d example.com | | -enum | Identify an enumeration via an index from the db listing | amass viz -enum 1 -d3 -d example.com | -| -o | Path to a pre-existing directory that will hold output files | amass viz -d3 -o OUTPATH -d example.com | -| -oA | Prefix used for naming all output files | amass viz -d3 -oA example -d example.com | | -gexf | Output to Graph Exchange XML Format (GEXF) | amass viz -gexf -d example.com | | -graphistry | Output Graphistry JSON | amass viz -graphistry -d example.com | | -i | Path to the Amass data operations JSON input file | amass viz -d3 -d example.com | | -maltego | Output a Maltego Graph Table CSV file | amass viz -maltego -d example.com | - +| -o | Path to a pre-existing directory that will hold output files | amass viz -d3 -o OUTPATH -d example.com | +| -oA | Prefix used for naming all output files | amass viz -d3 -oA example -d example.com | ### The 'track' Subcommand @@ -166,10 +176,8 @@ Shows differences between enumerations that included the same target(s) for moni | Flag | Description | Example | |------|-------------|---------| -| -config | Path to the INI 
configuration file | amass track -config config.ini | | -d | Domain names separated by commas (can be used multiple times) | amass track -d example.com | | -df | Path to a file providing root domain names | amass track -df domains.txt | -| -dir | Path to the directory containing the graph database | amass track -dir PATH | | -history | Show the difference between all enumeration pairs | amass track -history | | -last | The number of recent enumerations to include in the tracking | amass track -last NUM | | -since | Exclude all enumerations before a specified date (format: 01/02 15:04:05 2006 MST) | amass track -since DATE | @@ -180,23 +188,18 @@ Performs viewing and manipulation of the graph database. This subcommand only le | Flag | Description | Example | |------|-------------|---------| -| -config | Path to the INI configuration file | amass db -config config.ini | | -d | Domain names separated by commas (can be used multiple times) | amass db -d example.com | | -demo | Censor output to make it suitable for demonstrations | amass db -demo -d example.com | | -df | Path to a file providing root domain names | amass db -df domains.txt | -| -dir | Path to the directory containing the graph database | amass db -dir PATH | | -enum | Identify an enumeration via an index from the listing | amass db -enum 1 -show | -| -import | Import an Amass data operations JSON file to the graph database | amass db -import PATH | | -ip | Show the IP addresses for discovered names | amass db -show -ip -d example.com | | -ipv4 | Show the IPv4 addresses for discovered names | amass db -show -ipv4 -d example.com | | -ipv6 | Show the IPv6 addresses for discovered names | amass db -show -ipv6 -d example.com | | -json | Path to the JSON output file or '-' | amass db -names -silent -json out.json -d example.com | | -list | Print enumerations in the database and filter on domains specified | amass db -list | | -names | Print just discovered names | amass db -names -d example.com | -| -nocolor | Disable colorized output | amass db -names -nocolor -d example.com | | -o | Path to the text output file | amass db -names -o out.txt -d example.com | | -show | Print the results for the enumeration index + domains provided | amass db -show | -| -silent | Disable all output during execution | amass db -names -silent -json out.json -d example.com | | -src | Print data sources for the discovered names | amass db -show -src -d example.com | | -summary | Print just ASN table summary | amass db -summary -d example.com | @@ -234,48 +237,50 @@ Note that these locations are based on the [output directory](#the-output-direct | output_directory | The directory that stores the graph database and other output files | | maximum_dns_queries | The maximum number of concurrent DNS queries that can be performed | -### The network_settings Section +### The `resolvers` Section | Option | Description | |--------|-------------| -| address | IP address or range (e.g. a.b.c.10-245) that is in scope | -| asn | ASN that is in scope | -| cidr | CIDR (e.g. 192.168.1.0/24) that is in scope | -| port | Specifies a port to be used when actively pulling TLS certificates | +| resolver | The IP address of a DNS resolver and used globally by the amass package | -### The domains Section +### The `scope` Section | Option | Description | |--------|-------------| -| domain | A root DNS domain name to be added to the enumeration scope | +| address | IP address or range (e.g. a.b.c.10-245) that is in scope | +| asn | ASN that is in scope | +| cidr | CIDR (e.g. 
192.168.1.0/24) that is in scope | +| port | Specifies a port to be used when actively pulling TLS certificates or crawling | -### The resolvers Section +#### The `scope.domains` Section | Option | Description | |--------|-------------| -| resolver | The IP address of a DNS resolver and used globally by the amass package | +| domain | A root DNS domain name to be added to the enumeration scope | -### The blacklisted Section +#### The `scope.blacklisted` Section | Option | Description | |--------|-------------| | subdomain | A DNS subdomain name to be considered out of scope during the enumeration | -### The disabled_data_sources Section +### The `graphdbs` Section + +#### The `graphdbs.postgres` Section | Option | Description | |--------|-------------| -| data_source | One of the Amass data sources that is **not** to be used during the enumeration | +| primary | When set to true, the graph database is specified as the primary db | +| url | URL in the form of "postgres://[username:password@]host[:port]/database-name?sslmode=disable" where Amass will connect to a PostgreSQL database | +| options | Additional PostgreSQL database options | -### The gremlin Section +#### The `graphdbs.mysql` Section | Option | Description | |--------|-------------| -| url | URL in the form of "ws://host:port" where Amass will connect to a TinkerPop database | -| username | User of the TinkerPop database server that can access the Amass graph database | -| password | Valid password for the user identified by the 'username' option | +| url | URL in the form of "[username:password@]tcp(host[:3306])/database-name?timeout=10s" where Amass will connect to a MySQL database | -### The bruteforce Section +### The `bruteforce` Section | Option | Description | |--------|-------------| @@ -284,12 +289,11 @@ Note that these locations are based on the [output directory](#the-output-direct | minimum_for_recursive | Number of discoveries made in a subdomain before performing recursive brute forcing | | wordlist_file | Path to a custom wordlist file to be used during the brute forcing | -### The alterations Section +### The `alterations` Section | Option | Description | |--------|-------------| | enabled | When set to true, permuting resolved DNS names is performed during the enumeration | -| minimum_for_word_flip | Number of times a word must be seen before using it for future word flips and word additions | | edit_distance | Number of times an edit operation will be performed on a name sample during fuzzy label searching | | flip_words | When set to true, causes words in DNS names to be exchanged for others in the alteration word list | | flip_numbers | When set to true, causes numbers in DNS names to be exchanged for other numbers | @@ -297,11 +301,19 @@ Note that these locations are based on the [output directory](#the-output-direct | add_numbers | When set to true, causes numbers to be added and removed from resolved DNS names | | wordlist_file | Path to a custom wordlist file that provides additional words to the alteration word list | -### Data Source Sections +### The `data_sources` Section -Each Amass data source service can have a dedicated configuration file section. The section is named just as in the output from the 'amass enum -list' command. 
+| Option | Description | +|--------|-------------| +| ttl | The number of minutes that the responses of **all** data sources for the target are cached | + +#### The `data_sources.SOURCENAME` Section + +| Option | Description | +|--------|-------------| +| ttl | The number of minutes that the response of the data source for the target is cached | -This is how data sources can be configured that have authentication requirements. +##### The `data_sources.SOURCENAME.CREDENTIALSETID` Section | Option | Description | |--------|-------------| @@ -310,6 +322,12 @@ This is how data sources can be configured that have authentication requirements | username | User for the data source account | | password | Valid password for the user identified by the 'username' option | +#### The `data_sources.disabled` Section + +| Option | Description | +|--------|-------------| +| data_source | One of the Amass data sources that is **not** to be used during the enumeration | + ## The Graph Database All Amass enumeration findings are stored in a graph database. This database is either located in a single file within the output directory or connected to remotely using settings provided by the configuration file. From 94f1627757c3e07bcb7a179efabc0a32579c1e15 Mon Sep 17 00:00:00 2001 From: shelld3v <59408894+shelld3v@users.noreply.github.com> Date: Mon, 6 Jun 2022 17:15:34 +0700 Subject: [PATCH 15/19] Corrected Yandex's category --- README.md | 4 ++-- resources/scripts/{scrape => api}/yandex.ads | 23 +++++++++----------- 2 files changed, 12 insertions(+), 15 deletions(-) rename resources/scripts/{scrape => api}/yandex.ads (77%) diff --git a/README.md b/README.md index 3855bd227..95ea70b58 100644 --- a/README.md +++ b/README.md @@ -24,11 +24,11 @@ The OWASP Amass Project performs network mapping of attack surfaces and external | Technique | Data Sources | |:-------------|:-------------| -| APIs | 360PassiveDNS, Ahrefs, AnubisDB, BinaryEdge, BufferOver, BuiltWith, C99, Chaos, CIRCL, Cloudflare, DNSDB, DNSRepo, Detectify, FOFA, FullHunt, GitHub, GitLab, Greynoise, HackerTarget, Hunter, IntelX, LeakIX, Maltiverse, Mnemonic, PassiveTotal, PentestTools, Quake, Shodan, SonarSearch, Spamhaus, Spyse, Sublist3rAPI, ThreatBook, ThreatCrowd, ThreatMiner, Twitter, URLScan, VirusTotal, ZETAlytics, ZoomEye | +| APIs | 360PassiveDNS, Ahrefs, AnubisDB, BinaryEdge, BufferOver, BuiltWith, C99, Chaos, CIRCL, Cloudflare, DNSDB, DNSRepo, Detectify, FOFA, FullHunt, GitHub, GitLab, Greynoise, HackerTarget, Hunter, IntelX, LeakIX, Maltiverse, Mnemonic, PassiveTotal, PentestTools, Quake, Shodan, SonarSearch, Spamhaus, Spyse, Sublist3rAPI, ThreatBook, ThreatCrowd, ThreatMiner, Twitter, URLScan, VirusTotal, Yandex, ZETAlytics, ZoomEye | | Certificates | Active pulls (optional), Censys, CertSpotter, Crtsh, Digitorus, FacebookCT, GoogleCT | | DNS | Brute forcing, Reverse DNS sweeping, NSEC zone walking, Zone transfers, FQDN alterations/permutations, FQDN Similarity-based Guessing | | Routing | BGPTools, BGPView, IPdata, IPinfo, NetworksDB, RADb, Robtex, ShadowServer, TeamCymru | -| Scraping | AbuseIPDB, Ask, Baidu, Bing, DNSDumpster, DNSHistory, DuckDuckGo, Gists, HackerOne, HyperStat, IPv4Info, PKey, RapidDNS, Riddler, Searchcode, Searx, SiteDossier, Yahoo, Yandex | +| Scraping | AbuseIPDB, Ask, Baidu, Bing, DNSDumpster, DNSHistory, DuckDuckGo, Gists, HackerOne, HyperStat, IPv4Info, PKey, RapidDNS, Riddler, Searchcode, Searx, SiteDossier, Yahoo | | Web Archives | ArchiveIt, Arquivo, CommonCrawl, HAW, UKWebArchive, Wayback | | WHOIS | 
AlienVault, AskDNS, DNSlytics, ONYPHE, SecurityTrails, SpyOnWeb, Umbrella, WhoisXMLAPI | diff --git a/resources/scripts/scrape/yandex.ads b/resources/scripts/api/yandex.ads similarity index 77% rename from resources/scripts/scrape/yandex.ads rename to resources/scripts/api/yandex.ads index 38686f114..5354b5461 100644 --- a/resources/scripts/scrape/yandex.ads +++ b/resources/scripts/api/yandex.ads @@ -4,7 +4,7 @@ local url = require("url") name = "Yandex" -type = "scrape" +type = "api" function start() set_rate_limit(2) @@ -37,28 +37,25 @@ function vertical(ctx, domain) end local tlds = {"com", "com.tr", "ru"} - + local found = false for _, tld in pairs(tlds) do - local correct_tld = false - for i=1,10 do - local found = scrape(ctx, { - ['url']=build_url(c.username, c.key, domain, tld, i), + for i=1,20 do + local ok = scrape(ctx, { + ['url']=build_url(domain, c.username, c.key, tld, i), }) - - if not found then + if not ok then break - elseif i == 1 then - correct_tld = true end - end - if correct_tld then + found = true + end + if found then break end end end -function build_url(username, key, domain, tld, pagenum) +function build_url(domain, username, key, tld, pagenum) local query = "site:" .. domain .. " -www" local params = { ['maxpassages']=1, From 40bdd3d77b61092d0ce16d0da6f1a15029618f7a Mon Sep 17 00:00:00 2001 From: caffix Date: Thu, 23 Jun 2022 18:16:45 -0400 Subject: [PATCH 16/19] Fixes for the intel subcommand Committer: caffix --- cmd/amass/intel.go | 9 +++++++-- intel/input.go | 19 +------------------ intel/intel.go | 18 ++++++------------ 3 files changed, 14 insertions(+), 32 deletions(-) diff --git a/cmd/amass/intel.go b/cmd/amass/intel.go index b0814f143..29956daf2 100644 --- a/cmd/amass/intel.go +++ b/cmd/amass/intel.go @@ -260,7 +260,9 @@ func runIntelCommand(clArgs []string) { go func() { _ = ic.HostedDomains(ctx) }() } - processIntelOutput(ic, &args) + if !processIntelOutput(ic, &args) { + os.Exit(1) + } } func printNetblocks(asns []int, cfg *config.Config, sys systems.System) { @@ -279,7 +281,7 @@ func printNetblocks(asns []int, cfg *config.Config, sys systems.System) { } } -func processIntelOutput(ic *intel.Collection, args *intelArgs) { +func processIntelOutput(ic *intel.Collection, args *intelArgs) bool { var err error dir := config.OutputDirectory(ic.Config.Dir) @@ -303,6 +305,7 @@ func processIntelOutput(ic *intel.Collection, args *intelArgs) { _, _ = outptr.Seek(0, 0) } + var found bool // Collect all the names returned by the intelligence collection for out := range ic.Output { source, _, ips := format.OutputLineParts(out, args.Options.Sources, @@ -317,7 +320,9 @@ func processIntelOutput(ic *intel.Collection, args *intelArgs) { if outptr != nil { fmt.Fprintf(outptr, "%s%s%s\n", source, out.Domain, ips) } + found = true } + return found } // Obtain parameters from provided input files diff --git a/intel/input.go b/intel/input.go index 8f3fd410b..09ac81f1e 100644 --- a/intel/input.go +++ b/intel/input.go @@ -57,24 +57,7 @@ func (r *intelSource) Next(ctx context.Context) bool { default: } - if !r.queue.Empty() { - return true - } - - t := time.NewTimer(r.timeout) - defer t.Stop() - - for { - select { - case <-t.C: - close(r.done) - return false - case <-r.queue.Signal(): - if !r.queue.Empty() { - return true - } - } - } + return !r.queue.Empty() } // Data implements the pipeline InputSource interface. 
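The `Next` change above removes the timer-based wait: instead of blocking until the queue receives data or the timeout fires, the input source now answers immediately from the queue's current state. Below is a minimal, self-contained sketch of the new semantics; the type and field names are illustrative stand-ins, not the actual Amass implementation:

```go
package main

import (
	"context"
	"fmt"
)

// sketchSource models only what the patched Next relies on: a
// cancellation check followed by a non-blocking emptiness test.
type sketchSource struct {
	queue []string      // stand-in for the real queue type
	done  chan struct{} // closed when the source is finished
}

// Next reports whether more input is available right now; it no
// longer parks on a timer waiting for the queue to fill.
func (s *sketchSource) Next(ctx context.Context) bool {
	select {
	case <-ctx.Done():
		return false
	case <-s.done:
		return false
	default:
	}
	return len(s.queue) > 0
}

func main() {
	s := &sketchSource{
		queue: []string{"72.237.4.0/24"},
		done:  make(chan struct{}),
	}
	fmt.Println(s.Next(context.Background())) // true: work remains
}
```

This simplification holds up because, as the intel.go hunk that follows shows, addresses are now loaded into the source synchronously before the pipeline starts, so an empty queue really does mean the input is exhausted rather than merely late.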
diff --git a/intel/intel.go b/intel/intel.go index eff60c5c2..db5dabc43 100644 --- a/intel/intel.go +++ b/intel/intel.go @@ -26,8 +26,8 @@ import ( ) const ( - maxDnsPipelineTasks int = 15000 - maxActivePipelineTasks int = 25 + maxDnsPipelineTasks int = 2000 + maxActivePipelineTasks int = 50 ) // Collection is the object type used to execute a open source information gathering with Amass. @@ -76,16 +76,12 @@ func (c *Collection) HostedDomains(ctx context.Context) error { return err } + defer close(c.Output) // Setup the context used throughout the collection var cancel context.CancelFunc c.ctx, cancel = context.WithCancel(ctx) defer cancel() - go func() { - <-ctx.Done() - close(c.Output) - }() - var stages []pipeline.Stage stages = append(stages, pipeline.DynamicPool("", c.makeDNSTaskFunc(), maxDnsPipelineTasks)) if c.Config.Active { @@ -104,11 +100,9 @@ func (c *Collection) HostedDomains(ctx context.Context) error { continue } - go func(n *net.IPNet) { - for _, addr := range amassnet.AllHosts(n) { - source.InputAddress(&requests.AddrRequest{Address: addr.String()}) - } - }(cidr) + for _, addr := range amassnet.AllHosts(cidr) { + source.InputAddress(&requests.AddrRequest{Address: addr.String()}) + } } return pipeline.NewPipeline(stages...).Execute(ctx, source, c.makeOutputSink()) From fccbb2d70eb931d609727a38da81a7189a11f121 Mon Sep 17 00:00:00 2001 From: caffix Date: Thu, 23 Jun 2022 18:32:25 -0400 Subject: [PATCH 17/19] Updated the license badge --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 31a8c892f..f6fa0723a 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ ![GitHub Test Status](https://github.com/OWASP/Amass/workflows/tests/badge.svg) [![GoDoc](https://pkg.go.dev/badge/github.com/OWASP/Amass/v3?utm_source=godoc)](https://pkg.go.dev/github.com/OWASP/Amass/v3) -[![License](https://img.shields.io/github/license/OWASP/Amass)](https://www.apache.org/licenses/LICENSE-2.0) +[![License](https://img.shields.io/badge/license-apache%202-blue)](https://www.apache.org/licenses/LICENSE-2.0) [![Go Report](https://goreportcard.com/badge/github.com/OWASP/Amass)](https://goreportcard.com/report/github.com/OWASP/Amass) [![CodeFactor](https://www.codefactor.io/repository/github/OWASP/Amass/badge)](https://www.codefactor.io/repository/github/OWASP/Amass) [![Maintainability](https://api.codeclimate.com/v1/badges/41c139f7cf5c23df1e58/maintainability)](https://codeclimate.com/github/OWASP/Amass/maintainability) @@ -140,7 +140,7 @@ Add it to our ever-growing list of [REFERENCES.md](REFERENCES.md) by forking and * [Capt. Meelo | Asset Enumeration: Expanding a Target's Attack Surface](https://captmeelo.com/bugbounty/2019/09/02/asset-enumeration.html) * [Noobhax | My Recon Process — DNS Enumeration](https://medium.com/@noobhax/my-recon-process-dns-enumeration-d0e288f81a8a) -## Licensing [![License](https://img.shields.io/github/license/OWASP/Amass)](https://www.apache.org/licenses/LICENSE-2.0) +## Licensing [![License](https://img.shields.io/badge/license-apache%202-blue)](https://www.apache.org/licenses/LICENSE-2.0) This program is free software: you can redistribute it and/or modify it under the terms of the [Apache license](LICENSE). OWASP Amass and any contributions are Copyright © by Jeff Foley 2017-2022. Some subcomponents have separate licenses. 
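A note on the `defer close(c.Output)` change above: the removed goroutine waited on the caller's `ctx`, so a collection that finished normally, without the caller ever cancelling, would leave `c.Output` open and any consumer ranging over it blocked. Deferring the close ties the channel's lifetime to `HostedDomains` itself. A minimal sketch of the pattern, with hypothetical names rather than the actual Amass types:

```go
package main

import (
	"context"
	"fmt"
)

// produce closes its output channel when it returns, after normal
// completion and cancellation alike, so a consumer ranging over the
// channel is guaranteed to unblock.
func produce(ctx context.Context, out chan<- string) error {
	defer close(out)
	for _, v := range []string{"a.example.com", "b.example.com"} {
		select {
		case <-ctx.Done():
			return ctx.Err()
		case out <- v:
		}
	}
	return nil
}

func main() {
	out := make(chan string)
	go func() { _ = produce(context.Background(), out) }()
	for v := range out { // terminates because produce defers close(out)
		fmt.Println(v)
	}
}
```

The same shape is what lets `runIntelCommand` treat empty results as a failure: the range loop in `processIntelOutput` is now guaranteed to end, so its boolean result is always reached.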
From 0a3c7e0bc55fe1739ca9f8ef0f5d5b4f10ea784b Mon Sep 17 00:00:00 2001 From: caffix Date: Thu, 23 Jun 2022 18:33:39 -0400 Subject: [PATCH 18/19] v3.19.3 release --- format/print.go | 2 +- snapcraft.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/format/print.go b/format/print.go index 93337d53b..d0569dbc1 100644 --- a/format/print.go +++ b/format/print.go @@ -32,7 +32,7 @@ const Banner = ` .+++:. : .+++. const ( // Version is used to display the current version of Amass. - Version = "v3.19.2" + Version = "v3.19.3" // Author is used to display the Amass Project Team. Author = "OWASP Amass Project - @owaspamass" diff --git a/snapcraft.yaml b/snapcraft.yaml index 4f9711b27..02c81d48b 100644 --- a/snapcraft.yaml +++ b/snapcraft.yaml @@ -5,7 +5,7 @@ description: | security professionals perform network mapping of attack surfaces and external asset discovery using open source information gathering and active reconnaissance techniques. -version: 'v3.19.2' +version: 'v3.19.3' icon: images/snapcraft_icon.png license: Apache-2.0 base: core20 From 21ea9f0a682b953975de7791e9bda98a97069f07 Mon Sep 17 00:00:00 2001 From: Pham Sy Minh <59408894+shelld3v@users.noreply.github.com> Date: Fri, 24 Jun 2022 22:11:44 +0700 Subject: [PATCH 19/19] Improved code readability --- resources/scripts/api/bufferover.ads | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/resources/scripts/api/bufferover.ads b/resources/scripts/api/bufferover.ads index 60a033646..f2acf9904 100644 --- a/resources/scripts/api/bufferover.ads +++ b/resources/scripts/api/bufferover.ads @@ -37,7 +37,7 @@ function commercial_api_query(ctx, domain, key) }, }) if (err ~= nil and err ~= "") then - return + return false end send_names(ctx, resp)
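A closing note on the `return false` change above: a bare `return` in Lua yields no values, so a caller that assigns the result sees `nil`. Since `nil` and `false` are both falsy, a boolean test behaves the same either way; the explicit `false` simply documents the failure path at the return site, which is the readability gain the commit describes. A small sketch of the difference, using hypothetical functions rather than the script itself:

```lua
-- Both variants behave identically under "if not ok then ... end",
-- but the explicit boolean states the intent at the return site.
local function query_bare(err)
    if (err ~= nil and err ~= "") then
        return       -- caller receives nil
    end
    return true
end

local function query_explicit(err)
    if (err ~= nil and err ~= "") then
        return false -- explicit failure value
    end
    return true
end

print(query_bare("timeout"), query_explicit("timeout")) --> nil   false
```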