
Commit 177b914
ADS scripts using scrape automatically check for cached responses
caffix committed Jul 15, 2020
1 parent 119312c commit 177b914
Showing 5 changed files with 48 additions and 85 deletions.
8 changes: 3 additions & 5 deletions config/statik/statik.go

Large diffs are not rendered by default.

20 changes: 20 additions & 0 deletions datasrcs/script.go
@@ -408,3 +408,23 @@ func (s *Script) OnWhoisRequest(ctx context.Context, req *requests.WhoisRequest)
fmt.Sprintf("%s: horizontal callback: %v", s.String(), err))
}
}

func (s *Script) getCachedResponse(url string, ttl int) (string, error) {
for _, db := range s.sys.GraphDatabases() {
if resp, err := db.GetSourceData(s.String(), url, ttl); err == nil {
// Allow the data source to accept another request immediately on cache hits
s.ClearLast()
return resp, err
}
}

return "", fmt.Errorf("Failed to obtain a cached response for %s", url)
}

func (s *Script) setCachedResponse(url, resp string) error {
for _, db := range s.sys.GraphDatabases() {
db.CacheSourceData(s.String(), s.SourceType, url, resp)
}

return nil
}
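
Together these helpers give every script a cache-aside path over the graph databases: ask for a fresh copy first, and only go to the network (then populate the cache) on a miss. Below is a minimal sketch of that call pattern in the same datasrcs package, assuming the http.RequestWebPage helper used elsewhere in this file; fetchWithCache itself is illustrative and not part of this commit.

// fetchWithCache is an illustrative helper, not part of this commit: it shows the
// intended pairing of getCachedResponse and setCachedResponse.
func (s *Script) fetchWithCache(url string, ttl int) (string, error) {
	// Serve a cached copy when one is still within the TTL
	if resp, err := s.getCachedResponse(url, ttl); err == nil {
		return resp, nil
	}

	// Cache miss: fetch the page and store it for later requests
	resp, err := http.RequestWebPage(url, nil, nil, "", "")
	if err != nil {
		return "", err
	}
	s.setCachedResponse(url, resp)

	return resp, nil
}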
39 changes: 23 additions & 16 deletions datasrcs/script_exports.go
@@ -534,14 +534,27 @@ func (s *Script) scrape(L *lua.LState) int {
id, _ := getStringField(L, opt, "id")
pass, _ := getStringField(L, opt, "pass")

page, err := http.RequestWebPage(url, nil, headers, id, pass)
if err != nil {
bus.Publish(requests.LogTopic, eventbus.PriorityHigh, fmt.Sprintf("%s: %s: %v", s.String(), url, err))
L.Push(lua.LFalse)
return 1
var resp string
// Check for cached responses first
api := s.sys.Config().GetAPIKey(s.String())
if api != nil && api.TTL > 0 {
if r, err := s.getCachedResponse(url, api.TTL); err == nil {
resp = r
}
}

for _, n := range subRE.FindAllString(page, -1) {
if resp == "" {
var err error
resp, err = http.RequestWebPage(url, nil, headers, id, pass)
if err != nil {
bus.Publish(requests.LogTopic, eventbus.PriorityHigh, fmt.Sprintf("%s: %s: %v", s.String(), url, err))
L.Push(lua.LFalse)
return 1
}

s.setCachedResponse(url, resp)
}

for _, n := range subRE.FindAllString(resp, -1) {
name := cleanName(n)

if domain := cfg.WhichDomain(name); domain != "" {
@@ -632,13 +645,9 @@ func (s *Script) obtainResponse(L *lua.LState) int {
return 1
}

for _, db := range s.sys.GraphDatabases() {
if resp, err := db.GetSourceData(s.String(), url, ttl); err == nil {
// Allow the data source to accept another request immediately on cache hits
s.ClearLast()
L.Push(lua.LString(resp))
return 1
}
if resp, err := s.getCachedResponse(url, ttl); err == nil {
L.Push(lua.LString(resp))
return 1
}

L.Push(lua.LNil)
@@ -659,8 +668,6 @@ func (s *Script) cacheResponse(L *lua.LState) int {
return 0
}

for _, db := range s.sys.GraphDatabases() {
db.CacheSourceData(s.String(), s.SourceType, string(u), string(resp))
}
s.setCachedResponse(string(u), string(resp))
return 0
}
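
Both Lua exports are now thin wrappers around the shared helpers, so obtain_response, cache_response, and the scrape export all read and write the same per-source cache. The registration of these exports is not part of this diff; a rough gopher-lua sketch of how functions with the func(*lua.LState) int shape are exposed to a script looks like the following (names and wiring here are assumptions, not Amass's actual code).

// Hypothetical wiring sketch using github.com/yuin/gopher-lua; the real registration
// lives elsewhere in the datasrcs package and is unchanged by this commit.
L := lua.NewState()
defer L.Close()

L.SetGlobal("obtain_response", L.NewFunction(s.obtainResponse))
L.SetGlobal("cache_response", L.NewFunction(s.cacheResponse))
L.SetGlobal("scrape", L.NewFunction(s.scrape))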
33 changes: 1 addition & 32 deletions resources/scripts/api/bufferover.ads
@@ -9,40 +9,9 @@ function start()
end

function vertical(ctx, domain)
local resp
local vurl = buildurl(domain)
-- Check if the response data is in the graph database
if (api ~= nil and api.ttl ~= nil and api.ttl > 0) then
resp = obtain_response(vurl, api.ttl)
end

if (resp == nil or resp == "") then
local err

resp, err = request({url=vurl})
if (err ~= nil and err ~= "") then
return
end

if (api ~= nil and api.ttl ~= nil and api.ttl > 0) then
cache_response(vurl, resp)
end
end

sendnames(ctx, resp)
scrape(ctx, {url=buildurl(domain)})
end

function buildurl(domain)
return "https://dns.bufferover.run/dns?q=." .. domain
end

function sendnames(ctx, content)
local names = find(content, subdomainre)
if names == nil then
return
end

for i, v in pairs(names) do
newname(ctx, v)
end
end
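
The sendnames helper and its find(content, subdomainre) call could be dropped because the Go side of scrape now runs the subdomain regex over the (possibly cached) response and submits every match itself. Roughly, and with a simplified stand-in for the project's subRE and cleanName:

// Simplified illustration of the extraction now done in Go by the scrape export.
// The pattern below is a stand-in, not the exact regex Amass uses.
var subRE = regexp.MustCompile(`[a-zA-Z0-9-]+(\.[a-zA-Z0-9-]+)+`)

func extractNames(resp string) []string {
	var names []string
	for _, n := range subRE.FindAllString(resp, -1) {
		names = append(names, strings.ToLower(strings.Trim(n, ".")))
	}
	return names
}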
33 changes: 1 addition & 32 deletions resources/scripts/api/hackertarget.ads
@@ -11,27 +11,7 @@ function start()
end

function vertical(ctx, domain)
local resp
local vurl = buildurl(domain)
-- Check if the response data is in the graph database
if (api ~= nil and api.ttl ~= nil and api.ttl > 0) then
resp = obtain_response(vurl, api.ttl)
end

if (resp == nil or resp == "") then
local err

resp, err = request({url=vurl})
if (err ~= nil and err ~= "") then
return
end

if (api ~= nil and api.ttl ~= nil and api.ttl > 0) then
cache_response(vurl, resp)
end
end

sendnames(ctx, resp)
scrape(ctx, {url=buildurl(domain)})
end

function buildurl(domain)
@@ -75,14 +55,3 @@ end
function asnurl(addr)
return "https://api.hackertarget.com/aslookup/?q=" .. addr
end

function sendnames(ctx, content)
local names = find(content, subdomainre)
if names == nil then
return
end

for i, v in pairs(names) do
newname(ctx, v)
end
end
