-
Notifications
You must be signed in to change notification settings - Fork 0
/
scanner.py
103 lines (81 loc) · 3.83 KB
/
scanner.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
#!/usr/bin/env python3
'''
XSS Vulnerability scanner.
This tool searches for XSS vulnerabilities on target URL.
The best way to test it is to spawn your own Lab environment using VirtualBox and Metasploitable 2 virtual machine.
Evgeni Semenov, dev@safemail.sbs
'''
import requests
import re
import urllib.parse as urlparse
from bs4 import BeautifulSoup
class Scanner:
    """Reflected-XSS scanner.

    Crawls every link under a target URL, then probes each discovered
    form and each parameterized link by injecting a marker ``<script>``
    payload and checking whether the response echoes it back unescaped.
    Intended for lab use (e.g. DVWA on Metasploitable 2) only.
    """

    # Marker payload; mixed case helps slip past naive lowercase filters,
    # and the exact string is searched for verbatim in responses.
    XSS_TEST_SCRIPT = "<sCript>alert('Hacked!')</scriPt>"

    def __init__(self, url, ignore_links):
        """
        url          -- root URL; only links containing it are crawled.
        ignore_links -- list of exact URLs to skip (e.g. logout pages).
        """
        self.session = requests.Session()
        self.target_url = url
        self.target_links = []
        # NOTE(review): attribute name keeps the original misspelling
        # ("ingore") so external code referencing it keeps working.
        self.links_to_ingore = ignore_links

    def crawl(self, url=None):
        """Recursively collect (and print) every in-scope link under url.

        Defaults to the target URL. Fragment parts (#...) are stripped
        so the same page is not visited once per anchor.
        """
        if url is None:
            url = self.target_url
        result = self.session.get(url)
        href_links = re.findall('(?:href=")(.*?)"', result.content.decode(errors="ignore"))
        for link in href_links:
            link = urlparse.urljoin(url, link)
            if "#" in link:
                link = link.split("#")[0]
            # Stay on-target, never revisit, and honour the ignore list.
            if self.target_url in link and link not in self.target_links and link not in self.links_to_ingore:
                self.target_links.append(link)
                print(link)
                self.crawl(link)

    def extract_forms(self, url):
        """Return all <form> elements found on the page at url."""
        response = self.session.get(url)
        parsed_html = BeautifulSoup(response.content, features="html.parser")
        # find_all is the modern bs4 spelling of the deprecated findAll.
        return parsed_html.find_all("form")

    def submit_form(self, form, value, url):
        """Submit form with `value` injected into every text input.

        Returns the Response of the actual submission. Bug fix vs the
        original: POST forms used to be POSTed, the response discarded,
        and a redundant GET returned instead — so reflections in the
        POST response were never inspected.
        """
        action = form.get("action")
        post_url = urlparse.urljoin(url, action)
        # Method may be absent or upper-case in the HTML; default is GET.
        method = (form.get("method") or "get").lower()
        post_data = {}
        for field in form.find_all("input"):
            field_name = field.get("name")
            if field_name is None:
                # Browsers never submit nameless inputs; the original
                # code stored them under a None key.
                continue
            field_value = field.get("value")
            if field.get("type") == "text":
                field_value = value
            post_data[field_name] = field_value
        if method == "post":
            return self.session.post(post_url, data=post_data)
        return self.session.get(post_url, params=post_data)

    def run_scanner(self):
        """Test every crawled link: all its forms, plus its query string."""
        for link in self.target_links:
            forms = self.extract_forms(link)
            for form in forms:
                print("[*] Testing form in " + link)
                if self.test_xss_in_form(form, link):
                    print("\n\n[***] XSS vulnerability discovered " + link + " in the following form")
                    print(form)
            # Only links that carry query parameters can reflect input.
            if "=" in link:
                print("[+] Testing " + link)
                if self.test_xss_in_link(link):
                    print("\n\n[***] XSS vulnerability discovered in " + link)

    def test_xss_in_link(self, url):
        """True if injecting the payload into every query value reflects it."""
        # Naive injection: every "=" (i.e. every parameter) gets the payload.
        url = url.replace("=", "=" + self.XSS_TEST_SCRIPT)
        response = self.session.get(url)
        return self.XSS_TEST_SCRIPT in response.content.decode(errors="ignore")

    def test_xss_in_form(self, form, url):
        """True if submitting the payload through the form reflects it."""
        response = self.submit_form(form, self.XSS_TEST_SCRIPT, url)
        return self.XSS_TEST_SCRIPT in response.content.decode(errors="ignore")
if __name__ == "__main__":
    # Lab target: DVWA running on a local Metasploitable 2 VM.
    root_url = "http://10.0.2.7/dvwa/"
    # Never follow the logout link, or the crawl would end the session.
    excluded = ["http://10.0.2.7/dvwa/logout.php"]
    credentials = {"username": "admin", "password": "password", "Login": "submit"}

    scanner = Scanner(root_url, excluded)
    # Authenticate first so the crawler can reach pages behind the login.
    scanner.session.post("http://10.0.2.7/dvwa/login.php", data=credentials)
    scanner.crawl()        # discover all in-scope links
    scanner.run_scanner()  # probe every link and form for XSS