-
Notifications
You must be signed in to change notification settings - Fork 5
/
webscan.py
executable file
·361 lines (311 loc) · 7.42 KB
/
webscan.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# author: Brock | 老妖(laoyaogg@qq.com)
# date: 2014-11-15
# ver: 0.5
#
import sys, os
reload(sys)
sys.path.append('utils')
import urllib2
import cookielib
import re
import random
#from lxml import etree
import time
import getopt
#import errno
#import pdb
import urlparse
import types
import locale
#import socket
import string
import codecs
from utils import webutils
import importlib
checkAll = False
verbose = False
config = './config'
scanWait = 0
scanType = 0 # 0 list, 1 crawler
scanDepth = 2
saveCookie = False
cookie = ''
postData = ''
headers = {}
searchCount = -1
googleWhat = ''
ofile = sys.stdout
notFoundInfo = u'Page Not Found|页面没有找到|找不到页面|页面不存在|^Unknown$|^Bad Request$'
def log(message):
    """Write *message* plus a newline to the configured output file.

    Flushes after every write, and echoes to stdout when the log has
    been redirected to a file via the -o option.
    """
    # Parameter renamed from 'str', which shadowed the builtin; every
    # call site in this file passes the argument positionally.
    message += '\n'
    ofile.write(message)
    ofile.flush()
    if ofile != sys.stdout:
        sys.stdout.write(message)
class Scanner:
_opener = None
_testers = []
_progress = False
_user_agent = 'Firefox'
def __init__(self):
pass
@staticmethod
def log(s):
log(s)
def report(self, url, msg):
if self._progress:
sys.stdout.write('\n')
self._progress = False
log('=' * 60)
log('[URL] ' + url)
log('[MESSAGE]')
log(msg)
log('=' * 60)
@staticmethod
def isNotFoundPage(html):
return re.search(notFoundInfo, html, re.IGNORECASE) != None
@staticmethod
def isCheckAll():
return checkAll
@staticmethod
def isVerbose():
return verbose
@staticmethod
def getCookie():
return cookie
@staticmethod
def getData():
return postData
@staticmethod
def setupCookie(req):
if len(cookie) > 0:
request.add_header('Cookie', cookie)
@staticmethod
def getHeaders():
return headers
def getOpener(self):
return self._opener
def sendReq(self, request, data = None, cookie = '', timeout = 15):
webutils.setupRequest(request)
if len(cookie) > 0:
request.add_header('Cookie', cookie)
try:
response = self._opener.open(request, data = data, timeout = timeout)
if scanWait > 0:
time.sleep(scanWait)
except urllib2.HTTPError, e:
#print e, type(e), dir(e), e.code
if scanWait > 0:
time.sleep(scanWait)
if e.code != 404:
return e.msg
else:
return "Page Not Found"
except Exception,e:
if verbose:
log('Exception: ' + repr(e) + ' at :' + request.get_full_url())
return None
except:
return None
return response
def getUrls(self):
return ()
def scanUrl(self, url):
if verbose:
log("=>" + url)
reported = False
for tester in self._testers:
if tester.scan(url, self):
reported = True
if not verbose and not reported:
sys.stdout.write('.')
sys.stdout.flush()
self._progress = True
def scan(self):
self._opener = urllib2.build_opener()
webutils.setupOpener(self._opener)
#records = set()
urls = self.getUrls()
for url in urls:
#if not url in records:
# records.add(url)
self.scanUrl(url)
return True
class ListScanner(Scanner):
    """Scanner that probes hostRoot + each URI read from a word-list file."""

    def __init__(self, hostRoot, fileName):
        self._hostRoot = hostRoot
        self._fileName = fileName

    def getUrls(self):
        """Yield one absolute URL per non-empty line of the list file.

        Each line is stripped, percent-quoted, and prefixed with '/'
        when missing.
        """
        # FIX: close the list file when the generator finishes (the old
        # code leaked the handle); iterate lazily instead of readlines().
        f = open(self._fileName)
        try:
            for uri in f:
                uri = urllib2.quote(uri.strip())
                if len(uri) <= 0:
                    continue
                if uri[0] != '/':
                    uri = '/' + uri
                yield self._hostRoot + uri
        finally:
            f.close()
class SingleScanner(Scanner):
    """Scanner for exactly one explicit URL (scan type 3)."""

    def __init__(self, url):
        self._url = url

    def getUrls(self):
        """Return an iterator over the single configured URL."""
        return iter((self._url,))
from utils import crawler
class CrawlerScanner(Scanner):
    """Scanner fed by the site crawler starting at one URL (scan type 1)."""

    def __init__(self, startUrl):
        self._startUrl = startUrl

    def getUrls(self):
        """Return an iterator over every URL the crawler reaches."""
        spider = crawler.Crawler()
        return spider.crawl(self._opener, self._startUrl)
from utils import google
class GoogleScanner(Scanner):
    """Scanner driven by Google search results for a keyword (scan type 2)."""

    def __init__(self, keyword):
        self._keyword = keyword

    def getUrls(self):
        """Yield up to searchCount result URLs for the keyword."""
        for hit in google.google(self._opener, self._keyword, searchCount):
            yield hit
def loadTester(scanner, names):
mods = []
for name in names:
if len(name.strip()) <= 0 or name == '__init__':
continue
m = importlib.import_module('tester.' + name)
if m == None:
print 'cannot load tester: ' + name
sys.exit(-1)
scanner._testers.append(m)
def getAllTester():
    """Return a comma-terminated string of every tester module name
    found under ./tester (each '<name>.py' contributes '<name>,')."""
    names = [p[:-3] for p in os.listdir('./tester') if p.endswith('.py')]
    return ''.join(n + ',' for n in names)
#####################################################################
if __name__ == "__main__":
def usage():
helpMsg = sys.argv[0] + """ [opt] host
-a show all exist page
-d <depth> scanning depth
-e add custom error message
-f config file. default ./config
-h show help message
-k <cookie> set cookie
-n <keyword> 'page not found' filter
-N <keyword> extra 'page not found' filter
-o output file
-p <search result count> default 100
-s save cookie
-t <scanType> 0 list, 1 crawler, 2 google, 3 url. default 0
-v verbose
-w wait time."""
print helpMsg
sys.exit(0)
reload(sys)
sys.setdefaultencoding(locale.getpreferredencoding())
opts, args = getopt.getopt(sys.argv[1:], "aAd:D:e:f:hH:k:m:n:N:o:st:vw:")
#print opts
#print args
outfile = ''
testerMods = ''
for op, value in opts:
if op == '-a':
checkAll = True
if op == '-A':
checkAll = False
elif op == '-d':
scanDepth = int(value)
elif op == '-D':
postData = value
elif op == "-e":
results.append(value)
elif op == "-f":
config = value
#elif op == '-g':
# googleWhat = value
elif op == "-h":
usage()
elif op == '-H':
k, v = value.split(':')
headers[k.strip()]= v.strip()
elif op == '-k':
cookie = value
elif op == '-m':
testerMods = value
elif op == '-n':
notFoundInfo = value.decode(locale.getpreferredencoding())
elif op == '-N':
notFoundInfo += '|' + value.decode(locale.getpreferredencoding())
elif op == '-o':
outfile = value
elif op == '-p':
searchCount = int(value)
elif op == '-s':
saveCookie = True
elif op == '-t':
scanType = int(value)
if scanType == 2:
saveCookie = True
elif op == '-v':
verbose = True
elif op == "-w":
scanWait = float(value)
try:
if len(outfile) > 0:
ofile = open(outfile, "w")
except:
print 'cannot open: ' + outfile
sys.exit(-1)
if len(args) <= 0:
usage()
sys.exit(0)
if testerMods == 'all':
testerMods = getAllTester()
if scanType == 0:
if testerMods == '':
testerMods = 'simple'
urlRoot = args[0]
if not re.search(r'^http://', urlRoot, re.IGNORECASE):
urlRoot = 'http://' + urlRoot
checkAll = True
if os.path.isdir(config):
if config[-1] != '/':
config += '/'
ls = os.listdir(config)
for path in ls:
if re.search('\.txt$', path, re.IGNORECASE):
log('List scanning: [' + urlRoot + " " + config + path + ']')
scanner = ListScanner(urlRoot, config + path)
loadTester(scanner, testerMods.split(','))
scanner.scan()
else:
scanner = ListScanner(urlRoot, config)
loadTester(scanner, testerMods.split(','))
scanner.scan()
elif scanType == 1:
if testerMods == '':
testerMods = 'simple'
urlRoot = args[0]
if not re.search(r'^http://', urlRoot, re.IGNORECASE):
urlRoot = 'http://' + urlRoot
scanner = CrawlerScanner(urlRoot)
loadTester(scanner, testerMods.split(','))
scanner.scan()
elif scanType == 2:
if testerMods == '':
testerMods = 'simple'
keyword = args[0]
scanner = GoogleScanner(keyword)
loadTester(scanner, testerMods.split(','))
scanner.scan()
elif scanType == 3:
if testerMods == '':
testerMods = 'simple'
url = args[0]
scanner = SingleScanner(url)
loadTester(scanner, testerMods.split(','))
scanner.scan()