Support recent spiderfoot releases #26

Merged: 3 commits, Sep 17, 2016

plugins/testing/recon/spiderfoot.golismero (2 additions, 0 deletions)
@@ -14,3 +14,5 @@ modulelist = sfp_bingsearch,sfp_crossref,sfp_dns,sfp_email,sfp_googlesearch,sfp_
 typelist = AFFILIATE_DOMAIN,AFFILIATE,AFFILIATE_IPADDR,AFFILIATE_WEB_CONTENT,CO_HOSTED_SITE,EMAILADDR,PROVIDER_MAIL,PROVIDER_JAVASCRIPT,WEBSERVER_HTTPHEADERS,HTTP_CODE,IP_ADDRESS,LINKED_URL_EXTERNAL,LINKED_URL_INTERNAL,MALICIOUS_AFFILIATE,MALICIOUS_AFFILIATE_IPADDR,MALICIOUS_COHOST,MALICIOUS_IPADDR,MALICIOUS_SUBDOMAIN,PROVIDER_DNS,WEBSERVER_STRANGEHEADER,TCP_PORT_OPEN,SSL_CERTIFICATE_EXPIRED,SSL_CERTIFICATE_MISMATCH,SUBDOMAIN,TARGET_WEB_CONTENT,URL_PASSWORD,URL_UPLOAD,URL_FORM,URL_STATIC,URL_FLASH,URL_JAVA_APPLET,URL_JAVASCRIPT,INITIAL_TARGET,WEBSERVER_BANNER,
 interval = 1.0
 delete = yes
+usecase = all
+create_checks = 60
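
The two new settings correspond to the plugin arguments read in spiderfoot.py below: usecase selects which SpiderFoot scan profile to run (something recent SpiderFoot releases expect), and create_checks caps how many times the plugin polls the scan list before giving up. Since the .golismero file is INI-style, it can be loaded with the standard library; the following is only an illustrative sketch (Python 2, matching the plugin's idiom), and the section name "spiderfoot" is an assumption, not taken from the repository.

from ConfigParser import RawConfigParser

parser = RawConfigParser()
parser.read("plugins/testing/recon/spiderfoot.golismero")
usecase = parser.get("spiderfoot", "usecase")                 # "all"
create_checks = parser.getint("spiderfoot", "create_checks")  # 60
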
plugins/testing/recon/spiderfoot.py (20 additions, 8 deletions)
@@ -125,6 +125,7 @@ def run(self, info):
             "scantarget": info.hostname,
             "modulelist": self.get_list("modulelist", "module_"),
             "typelist": self.get_list("typelist", "type_"),
+            "usecase": Config.plugin_args.get("usecase", "all")
         })
         if resp.status_code != 200:
             r = resp.content
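
The hunk above adds the usecase field to the scan-creation request, which recent SpiderFoot releases expect when a new scan is started. For context, here is a standalone sketch of an equivalent request using the requests library; the host, port, endpoint path (/startscan) and field values are assumptions for illustration, not taken from this patch.

import requests

resp = requests.post("http://127.0.0.1:5001/startscan", data={
    "scanname": "golismero-audit",          # illustrative values
    "scantarget": "www.example.com",
    "modulelist": "module_sfp_dns,module_sfp_bingsearch",
    "typelist": "type_SUBDOMAIN,type_IP_ADDRESS",
    "usecase": "all",                       # the field this pull request adds
})
if resp.status_code != 200:
    print "Scan creation failed:", resp.content
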
@@ -144,8 +145,11 @@ def run(self, info):
         last_msg = ""
         is_created = False
         scan_id = None
+        create_checks = Config.plugin_args.get("create_checks",60)
+        checks = 0
         while True:
             resp = get(url_scanlist)
+            checks += 1
             if resp.status_code != 200:
                 status = "ERROR-FAILED"
                 break
@@ -172,6 +176,12 @@ def run(self, info):
             else:
                 if not is_created:
                     Logger.log_verbose("Status: CREATING")
+                    if checks == create_checks:
+                        Logger.log_error(
+                            "Scan not found within %s checks, \
+                            aborting!" % create_checks
+                        )
+                        return
                 else:
                     Logger.log_verbose("Status: DELETED")
                     Logger.log_error(
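
Taken together, the two hunks above turn an unbounded wait into a bounded one: a counter is incremented on every poll of url_scanlist, and once it reaches create_checks without the scan ever appearing, the plugin logs an error and returns instead of spinning forever. A self-contained sketch of the same pattern, with illustrative names and a caller-supplied fetch function:

import time

def wait_for_scan(fetch_scan_names, scan_name, create_checks=60, interval=1.0):
    checks = 0
    while True:
        checks += 1
        if scan_name in fetch_scan_names():
            return True   # scan was created; caller starts watching its status
        if checks == create_checks:
            return False  # never created; caller logs the error and aborts
        time.sleep(interval)
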
@@ -340,7 +350,7 @@ def parse(self, fd):
 
         # Make sure the file format is correct.
         assert iterable.next() == [
-            "Updated", "Type", "Module", "Source", "Data"
+            "Updated", "Type", "Module", "Source", "F/P", "Data"
         ], "Unsupported file format!"
 
         # For each row...
@@ -350,9 +360,9 @@
                 continue
 
             # Split the row into its columns.
-            assert len(row) == 5, "Broken CSV file! " \
+            assert len(row) == 6, "Broken CSV file! " \
                 "This may happen when using an old version of SpiderFoot."
-            _, sf_type, sf_module, source, raw_data = row
+            _, sf_type, sf_module, source, _, raw_data = row
 
             # Call the parser method for this data type, if any.
             method = getattr(self, "sf_" + sf_type, self.sf_null)
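
These two hunks track a change in SpiderFoot's CSV export, which gained an "F/P" (false positive flag) column between "Source" and "Data", widening each row from five fields to six; the extra field is unpacked into _ and ignored. A small round-trip sketch with made-up sample data:

import csv
from StringIO import StringIO

sample = (
    "Updated,Type,Module,Source,F/P,Data\r\n"
    "2016-09-17 10:30:00,IP_ADDRESS,sfp_dns,example.com,0,93.184.216.34\r\n"
)
iterable = csv.reader(StringIO(sample))
assert iterable.next() == [
    "Updated", "Type", "Module", "Source", "F/P", "Data"
]
for row in iterable:
    assert len(row) == 6
    _, sf_type, sf_module, source, _, raw_data = row
    print sf_type, raw_data  # IP_ADDRESS 93.184.216.34
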
@@ -367,6 +377,7 @@ def parse(self, fd):
                 Logger.log_error_verbose(str(e))
                 Logger.log_error_more_verbose(tb)
 
+
         # Reconstruct the suspicious header vulnerabilities.
         for url, headers in self.strange_headers.iteritems():
             try:
@@ -377,8 +388,8 @@ def parse(self, fd):
                     vulnerability = SuspiciousHeader(resp, name, value)
                     self.__add_partial_results((vulnerability,))
                 elif warn_data_lost:
-                    warn("Missing information in SpiderFoot results,"
-                         " some data may be lost", RuntimeError)
+                    warn("Missing information in SpiderFoot results, \
+                        some data may be lost")
                     warn_data_lost = False
             except Exception, e:
                 tb = format_exc()
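
One subtlety in the rewritten warn() call: a backslash continuation inside a string literal keeps the next line's leading whitespace as part of the message, whereas the old form, implicit concatenation of two adjacent literals, did not. A quick comparison:

a = "Missing information in SpiderFoot results, \
    some data may be lost"
b = ("Missing information in SpiderFoot results,"
     " some data may be lost")
print repr(a)  # the indentation before "some" is embedded in the string
print repr(b)  # exactly one space before "some"
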
@@ -393,16 +404,17 @@ def parse(self, fd):
             self.reconstruct_http_headers or
             self.reconstruct_http_data
         ):
-            warn("Missing information in SpiderFoot results,"
-                 " some data may be lost", RuntimeError)
+            warn("Missing information in SpiderFoot results, \
+                some data may be lost")
             warn_data_lost = False
         self.reconstruct_http_code.clear()
         self.reconstruct_http_headers.clear()
         self.reconstruct_http_data.clear()
         self.reconstructed_http.clear()
 
         # Reconstruct the port scans.
-        for address, ports in self.port_scan:
+        for address in self.port_scan:
+            ports = self.port_scan[address]
             try:
                 ip = IP(address)
                 ps = Portscan(ip, (("OPEN", "TCP", port) for port in ports))
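
The final hunk fixes the port-scan loop: self.port_scan is evidently a dictionary keyed by address, and iterating a dict yields only its keys, so the old tuple-unpacking form would try to unpack each address string and fail. A minimal reproduction with made-up data:

port_scan = {"192.0.2.1": [22, 80], "192.0.2.2": [443]}

# Old form: unpacking each key (a string) raises
# "ValueError: too many values to unpack".
#     for address, ports in port_scan: ...

# New form, as merged: index the dict by the iterated key.
for address in port_scan:
    ports = port_scan[address]
    print address, ports

In Python 2, for address, ports in port_scan.iteritems(): would work as well and keeps the old shape; the merged fix indexes the dictionary explicitly instead.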