Added Google, PSBDMP and PublicWWW as datasources
shelld3v committed May 13, 2022
1 parent 8a7cd5d commit 2c172ad
Showing 5 changed files with 146 additions and 4 deletions.
6 changes: 3 additions & 3 deletions README.md
@@ -24,12 +24,12 @@ The OWASP Amass Project performs network mapping of attack surfaces and external

| Technique | Data Sources |
|:-------------|:-------------|
- | APIs | 360PassiveDNS, Ahrefs, AnubisDB, BinaryEdge, BufferOver, BuiltWith, C99, Chaos, CIRCL, Cloudflare, DNSDB, DNSRepo, Detectify, FOFA, FullHunt, GitHub, GitLab, Greynoise, HackerTarget, Hunter, IntelX, LeakIX, Maltiverse, Mnemonic, N45HT, PassiveTotal, PentestTools, Quake, Shodan, SonarSearch, Spamhaus, Spyse, Sublist3rAPI, ThreatBook, ThreatCrowd, ThreatMiner, Twitter, URLScan, VirusTotal, ZETAlytics, ZoomEye |
+ | APIs | 360PassiveDNS, Ahrefs, AnubisDB, BinaryEdge, BufferOver, BuiltWith, C99, Chaos, CIRCL, Cloudflare, DNSDB, DNSRepo, Detectify, FOFA, FullHunt, GitHub, GitLab, Greynoise, HackerTarget, Hunter, IntelX, LeakIX, Maltiverse, Mnemonic, N45HT, PSBDMP, PassiveTotal, PentestTools, Quake, Shodan, SonarSearch, Spamhaus, Spyse, Sublist3rAPI, ThreatBook, ThreatCrowd, ThreatMiner, Twitter, URLScan, VirusTotal, ZETAlytics, ZoomEye |
| Certificates | Active pulls (optional), Censys, CertSpotter, Crtsh, Digitorus, FacebookCT, GoogleCT |
| DNS | Brute forcing, Reverse DNS sweeping, NSEC zone walking, Zone transfers, FQDN alterations/permutations, FQDN Similarity-based Guessing |
| Routing | ARIN, BGPTools, BGPView, IPdata, IPinfo, NetworksDB, RADb, Robtex, ShadowServer, TeamCymru |
- | Scraping | AbuseIPDB, Ask, Baidu, Bing, DNSDumpster, DuckDuckGo, Gists, HackerOne, HyperStat, IPv4Info, PKey, RapidDNS, Riddler, Searchcode, Searx, SiteDossier, Yahoo |
- | Web Archives | ArchiveIt, Arquivo, CommonCrawl, HAW, UKWebArchive, Wayback |
+ | Scraping | AbuseIPDB, Ask, Baidu, Bing, DNSDumpster, DuckDuckGo, Gists, Google, HackerOne, HyperStat, IPv4Info, PKey, RapidDNS, Riddler, Searchcode, Searx, SiteDossier, Yahoo |
+ | Web Archives | ArchiveIt, Arquivo, CommonCrawl, HAW, PublicWWW, UKWebArchive, Wayback |
| WHOIS | AlienVault, AskDNS, DNSlytics, ONYPHE, SecurityTrails, SpyOnWeb, Umbrella, WhoisXMLAPI |

----
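
Each new name in the table maps to a data-source script added by this commit: resources/scripts/api/psbdmp.ads, resources/scripts/crawl/publicwww.ads, and resources/scripts/scrape/google.ads. All three follow the ADS pattern visible below: a Lua file declaring name and type, a start() hook that sets a rate limit, and a vertical(ctx, domain) callback that asks the engine to fetch and mine a URL. A minimal sketch of that pattern, using a hypothetical "ExampleSource" and a made-up search URL rather than anything from this commit:

-- Hypothetical skeleton for illustration only; the source name and URL are invented.
name = "ExampleSource"
type = "scrape"

function start()
    -- one request per second, in line with the limits set by the new scripts
    set_rate_limit(1)
end

function vertical(ctx, domain)
    -- scrape() fetches the page and reports any in-scope names it contains
    scrape(ctx, {url="https://search.example.org/?q=" .. domain})
end
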
14 changes: 13 additions & 1 deletion examples/config.ini
@@ -268,10 +268,16 @@ minimum_ttl = 1440 ; One day

# https://onyphe.io (Free)
#[data_sources.ONYPHE]
-#ttl = 10080
+#ttl = 4320
#[data_sources.ONYPHE.Credentials]
#apikey =

# https://psbdmp.ws (Free)
#[data_sources.PSBDMP]
#ttl = 10080
#[data_sources.PSBDMP.Credentials]
#apikey =

# https://passivetotal.com (Paid/Free-trial)
#[data_sources.PassiveTotal]
#ttl = 10080
@@ -285,6 +291,12 @@ minimum_ttl = 1440 ; One day
#[data_sources.PentestTools.Credentials]
#apikey =

# https://publicwww.com (Free)
#[data_sources.PublicWWW]
#ttl = 10080
#[data_sources.PublicWWW.Credentials]
#apikey =

# https://quake.360.cn (Paid)
#[data_sources.Quake]
#ttl = 4320
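
Like the rest of examples/config.ini, the new stanzas ship commented out. Enabling them means uncommenting the section headers and, where a key is wanted, filling in apikey; the values below are placeholders rather than anything shipped in this commit. Judging from the scripts, PSBDMP returns some results even without a key (the key only unlocks fetching the individual dumps), while PublicWWW does nothing until a key is configured.

[data_sources.PSBDMP]
ttl = 10080
[data_sources.PSBDMP.Credentials]
apikey = YOUR_PSBDMP_KEY

[data_sources.PublicWWW]
ttl = 10080
[data_sources.PublicWWW.Credentials]
apikey = YOUR_PUBLICWWW_KEY
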
51 changes: 51 additions & 0 deletions resources/scripts/api/psbdmp.ads
@@ -0,0 +1,51 @@
-- Copyright 2022 Jeff Foley. All rights reserved.
-- Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.

local json = require("json")

name = "PSBDMP"
type = "api"

function start()
    set_rate_limit(1)
end

function vertical(ctx, domain)
    local c
    local cfg = datasrc_config()
    if cfg ~= nil then
        c = cfg.credentials
    end

    local resp, err = request(ctx, {url=search_url(domain)})
    if (err ~= nil and err ~= "") then
        log(ctx, "vertical request to service failed: " .. err)
        return
    end

    send_names(ctx, resp)
    if (c == nil or c.key == nil or c.key == "") then
        return
    end

    local j = json.decode(resp)
    if (j == nil or j.count == nil or j.count == 0) then
        return
    end

    for _, dump in pairs(j.data) do
        local ok = scrape(ctx, {url=dump_url(dump.id, c.key)})
        if not ok then
            return
        end
    end
end

function search_url(domain)
    return "https://psbdmp.ws/api/v3/search/" .. domain
end

function dump_url(id, key)
    return "https://psbdmp.ws/api/v3/dump/" .. id .. "?key=" .. key
end
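
The dump loop assumes the search endpoint returns JSON carrying a count and a data array whose entries have an id field; that shape is inferred from the script itself, not from PSBDMP documentation. A standalone sketch with made-up IDs and a placeholder key shows how those IDs feed dump_url():

-- Assumed response shape; the IDs and key are placeholders for illustration.
local sample = {
    count = 2,
    data = {
        { id = "AbCd1234" },
        { id = "EfGh5678" },
    },
}

for _, dump in pairs(sample.data) do
    print("https://psbdmp.ws/api/v3/dump/" .. dump.id .. "?key=MY_KEY")
end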

40 changes: 40 additions & 0 deletions resources/scripts/crawl/publicwww.ads
@@ -0,0 +1,40 @@
-- Copyright 2022 Jeff Foley. All rights reserved.
-- Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.

name = "PublicWWW"
type = "crawl"

function start()
    set_rate_limit(1)
end

function check()
    local c
    local cfg = datasrc_config()
    if cfg ~= nil then
        c = cfg.credentials
    end

    if (c ~= nil and c.key ~= nil and c.key ~= "") then
        return true
    end
    return false
end

function vertical(ctx, domain)
    local c
    local cfg = datasrc_config()
    if cfg ~= nil then
        c = cfg.credentials
    end

    if (c == nil or c.key == nil or c.key == "") then
        return
    end

    scrape(ctx, {url=build_url(domain, c.key)})
end

function build_url(domain, key)
    return "https://publicwww.com/websites/%22." .. domain .. "%22/?export=csvsnippetsu&key=" .. key
end
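
The %22 sequences are URL-encoded double quotes, so the search term is the literal string ".<domain>", and export=csvsnippetsu appears to request a CSV export of the matching sites for scrape() to mine. A standalone sketch of the URL built for a placeholder key:

-- Reproduces build_url() outside the script engine; the key is a placeholder.
local function build_url(domain, key)
    return "https://publicwww.com/websites/%22." .. domain .. "%22/?export=csvsnippetsu&key=" .. key
end

print(build_url("example.com", "MY_KEY"))
-- https://publicwww.com/websites/%22.example.com%22/?export=csvsnippetsu&key=MY_KEY
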
39 changes: 39 additions & 0 deletions resources/scripts/scrape/google.ads
@@ -0,0 +1,39 @@
-- Copyright 2022 Jeff Foley. All rights reserved.
-- Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.

local url = require("url")

name = "Google"
type = "scrape"

function start()
    set_rate_limit(5)
end

function vertical(ctx, domain)
    for d=1,2 do
        for i=0,20,10 do
            local ok = scrape(ctx, {['url']=build_url(domain, d, i)})
            if not ok then
                log(ctx, "access to search engine is blocked")
                return
            end
        end
    end
end

function build_url(domain, depth, start)
    local query = "site:" .. string.rep("*.", depth) .. domain .. " -www.*"
    local params = {
        ['q']=query,
        ['start']=start,
        ['btnG']="Search",
        ['hl']="en",
        ['biw']="",
        ['bih']="",
        ['gbv']="1",
        ['filter']="0",
    }

    return "https://www.google.com/search?" .. url.build_query_string(params)
end
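
The nested loops in vertical() search one and then two wildcard subdomain levels deep while excluding the obvious www host, and page through the first three result pages via the start parameter; gbv=1 asks Google for the basic HTML interface and filter=0 disables the omitted-results filter, which keeps the responses simple to scrape. A standalone sketch of the queries generated for example.com:

-- Prints the query string and start offset for each request vertical() would make.
local domain = "example.com"
for depth = 1, 2 do
    local query = "site:" .. string.rep("*.", depth) .. domain .. " -www.*"
    for start = 0, 20, 10 do
        print(query .. "  start=" .. start)
    end
end
-- site:*.example.com -www.*    with start 0, 10, 20
-- site:*.*.example.com -www.*  with start 0, 10, 20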
