forked from unstppbl/gowap
-
Notifications
You must be signed in to change notification settings - Fork 0
/
scraper.go
53 lines (47 loc) · 1.33 KB
/
scraper.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
package scraper
import (
"net"
"net/url"
"strings"
)
// ScrapedURL pairs a scraped page URL with the HTTP status code observed
// for it. Both fields are omitted from JSON when zero-valued.
type ScrapedURL struct {
	URL    string `json:"url,omitempty"`    // the page URL
	Status int    `json:"status,omitempty"` // HTTP response status code
}
// ScrapedData aggregates everything collected from a single page visit:
// the page content plus transport- and DNS-level metadata.
type ScrapedData struct {
	URLs       ScrapedURL          // visited URL and its HTTP status
	HTML       string              // raw page markup
	Headers    map[string][]string // HTTP response headers
	Scripts    []string            // presumably script URLs/sources from the page — confirm against the Scraper implementations
	Cookies    map[string]string   // cookie name → value
	Meta       map[string][]string // presumably meta-tag name → content values — confirm against the Scraper implementations
	DNS        map[string][]string // DNS record type ("NS", "MX", "TXT", "CNAME") → values, as produced by scrapeDNS
	CertIssuer []string            // TLS certificate issuer information
}
// Scraper is an interface for the different scraping browser backends
// (colly, rod).
type Scraper interface {
	// Init prepares the scraper for the given start URL.
	Init(url string) error
	// CanRenderPage reports whether this backend renders pages
	// (presumably: executes JavaScript — confirm against implementations).
	CanRenderPage() bool
	// Scrape visits paramURL and returns the collected page data.
	Scrape(paramURL string) (*ScrapedData, error)
	// EvalJS evaluates jsProp in the page context and returns the result.
	EvalJS(jsProp string) (*string, error)
	// SetDepth sets the crawl depth.
	SetDepth(depth int)
}
// scrapeDNS collects NS, MX, TXT, and CNAME records for the registrable
// domain of paramURL, keyed by record type ("NS", "MX", "TXT", "CNAME").
// Individual lookup failures are tolerated (best-effort: the corresponding
// key is simply absent). An unparsable URL, or a hostname with fewer than
// two labels (e.g. "localhost"), yields an empty, non-nil map instead of
// panicking as the previous implementation did.
//
// NOTE(review): taking only the last two hostname labels mishandles
// multi-label public suffixes such as "co.uk" (yields "co.uk" instead of
// "example.co.uk") — confirm whether callers care.
func scrapeDNS(paramURL string) map[string][]string {
	scrapedDNS := make(map[string][]string)

	u, err := url.Parse(paramURL)
	if err != nil {
		return scrapedDNS
	}
	parts := strings.Split(u.Hostname(), ".")
	if len(parts) < 2 {
		// No registrable domain to query (empty or single-label host).
		return scrapedDNS
	}
	domain := parts[len(parts)-2] + "." + parts[len(parts)-1]

	// Lookup errors below are deliberately ignored: DNS data is
	// best-effort metadata and a partial result is still useful.
	nsSlice, _ := net.LookupNS(domain)
	for _, ns := range nsSlice {
		scrapedDNS["NS"] = append(scrapedDNS["NS"], ns.Host)
	}
	mxSlice, _ := net.LookupMX(domain)
	for _, mx := range mxSlice {
		scrapedDNS["MX"] = append(scrapedDNS["MX"], mx.Host)
	}
	txtSlice, _ := net.LookupTXT(domain)
	scrapedDNS["TXT"] = append(scrapedDNS["TXT"], txtSlice...)
	// Only record the CNAME on success; the old code stored an empty
	// string when the lookup failed.
	if cname, err := net.LookupCNAME(domain); err == nil {
		scrapedDNS["CNAME"] = append(scrapedDNS["CNAME"], cname)
	}
	return scrapedDNS
}