diff --git a/README.MD b/README.MD
index e6cb6bb..6c4fcc5 100644
--- a/README.MD
+++ b/README.MD
@@ -1,5 +1,5 @@
 ## w9scan
-This scanner does more than just call bugscan plugins; the scanner will keep being upgraded, drawing on excellent scanner projects on github, to build a `niub` (awesome) scanner! It only needs python 2.7, requires no other third-party libraries, and supports Windows/Linux. If this program helps you, feel free to give it a `Star`; if you are not taking part in development, please do not click `Fork`.
+This scanner does more than just call bugscan plugins; it will keep being upgraded, drawing on excellent scanner projects on github, to build a `niub` (awesome) scanner! It only needs python 2.7, requires no other third-party libraries, and supports Windows/Linux. If this program helps you, feel free to give it a `Star`; if you are not taking part in development, please do not click `Fork`.
 
 #### Goals
 - Works on both Linux and Windows
@@ -7,7 +7,7 @@
 - Become the Top 1 open-source scanner
 
 ## Changelog
-- Coming next: subdomain brute-forcing/discovery added to the scan, a bug feedback mechanism, HTML-format scan reports, a scanner architecture overview and a plugin-writing guide
+- Coming next: subdomain brute-forcing/discovery added to the scan, an error-reporting mechanism, a bug feedback mechanism, HTML-format scan reports, a scanner architecture overview and a plugin-writing guide
 - 1.4.3 Added the WAF/CDN detection module `waf_identify.py`; thanks to [WebEye](https://github.com/zerokeeper/WebEye/) for the code and fingerprints
 - 1.4.2 Added a crawler-based backup-file detection module, inspired by [bcrpscan](https://github.com/secfree/bcrpscan)
diff --git a/lib/core/common.py b/lib/core/common.py
index 34f3f03..cc4bc27 100644
--- a/lib/core/common.py
+++ b/lib/core/common.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-
+#coding:utf-8
 from lib.core.data import paths
 import sys
 import os
@@ -7,6 +7,7 @@
 from lib.core.settings import banner as banner1
 from lib.core.log import logger
 import urlparse
+import urllib2
 """
 Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
@@ -111,4 +112,4 @@ def Get_lineNumber_fileName():
     return '%s:%d <= %s:%d' % (f_co_name,
                                f_line,
                                ff_co_name,
-                               ff_line)
\ No newline at end of file
+                               ff_line)
diff --git a/lib/core/exploit.py b/lib/core/exploit.py
index 29c39dc..534f1ea 100644
--- a/lib/core/exploit.py
+++ b/lib/core/exploit.py
@@ -129,7 +129,7 @@ def _work(self,threadConf):
                 pluginObj.audit(threadConf["agrs"])
             except Exception as error_info:
-                self._print( "[!!!] ",threadConf["service"], threadConf["filename"],error_info)
+                self._print( "[!!!] service:%s filename:%s error_info:%s"%(threadConf["service"], threadConf["filename"],error_info))
 
     def _security_note(self, body, uuid=None):
         self.lock_output.acquire()
diff --git a/lib/utils/crawler.py b/lib/utils/crawler.py
index 7ed4a70..f1730b2 100644
--- a/lib/utils/crawler.py
+++ b/lib/utils/crawler.py
@@ -6,6 +6,7 @@
 import re
 from thirdparty import hackhttp
 from lib.core.data import w9_hash_pycode
+from lib.utils import until
 
 req = hackhttp.hackhttp()
 
@@ -50,13 +51,11 @@ def craw(self):
             new_url = self.urls.get_new_url()
             print("craw:" + new_url)
             try:
-                code, head, html, redirect_url, log = req.http(new_url)
+                html = until.w9_get(new_url)
                 check(new_url,html)
             except Exception as errinfo:
                 print "[xxx] spider request error:",errinfo
-                code = 0
                 html = ''
-            if code != 200:
                 continue
             new_urls = self._parse(new_url, html)
             self.urls.add_new_urls(new_urls)
             self.deep = self.deep + 1
@@ -103,7 +102,7 @@ def _get_new_urls(self, page_url, links):
                 new_urls.add(new_full_url)
         return new_urls
 
-def check(url,html):
+def check(url,html = ''):
     for k, v in w9_hash_pycode.iteritems():
         try:
             pluginObj = v["pluginObj"]
diff --git a/lib/utils/until.py b/lib/utils/until.py
index 3d8d8da..f6c5872 100644
--- a/lib/utils/until.py
+++ b/lib/utils/until.py
@@ -16,6 +16,12 @@
 _unreserved_marks = "-_.!~*'()"
 _safe_chars = urllib.always_safe + '%' + _reserved + _unreserved_marks
 
+def w9_get(url):
+    # w9scan function for get requests
+    req = urllib2.Request(url)
+    req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.2; rv:16.0) Gecko/20100101 Firefox/16.0')
+    s = urllib2.urlopen(req).read()
+    return s
 
 def is_ipaddr(varObj):
     """
diff --git a/lib/utils/until.pyc b/lib/utils/until.pyc
index f2e9366..448d4a0 100644
Binary files a/lib/utils/until.pyc and b/lib/utils/until.pyc differ
diff --git a/plugins/ssl/104.py b/plugins/ssl/104.py
index 9a7b59a..dbb38dc 100644
--- a/plugins/ssl/104.py
+++ b/plugins/ssl/104.py
@@ -86,7 +86,8 @@ def assign(service, arg):
 
 
 def audit(arg):
-    host, port = arg
+    host = arg
+    port = 443
     try:
         s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         s.connect((host, port))
diff --git a/plugins/www/3338.py b/plugins/www/3338.py
index a27773f..c97fff7 100644
--- a/plugins/www/3338.py
+++ b/plugins/www/3338.py
@@ -1,6 +1,11 @@
 #!/usr/bin/evn python
+# -*- coding: utf-8 -*-
 import socket
 import urlparse
+import json
+import sys
+reload(sys)
+sys.setdefaultencoding('utf-8')
 
 def assign(service, arg):
     if service == 'www':
@@ -9,8 +14,20 @@
         return True, hostname
 
 def audit(arg):
-    security_info("IP:" + arg)
+    url = "http://ip.taobao.com/service/getIpInfo.php?ip=%s" % arg
+    s = util.w9_get(url)
+    jsondata = json.loads(s)
+    if jsondata['code'] == 1:
+        jsondata['data'] = {'region': '', 'city': '', 'isp': ''}
+    else:
+        security_info("Region:" + jsondata['data']['region'])
+        security_info("ISP:" + jsondata['data']['isp'])
+        security_info("City:" + jsondata['data']['city'])
+    security_info("IP Address:" + arg)
     task_push("ip",arg)
+    # Get IP Address
 
 if __name__ == "__main__":
-    print assign("www","https://blog.hacking8.com")
+    from dummy import *
+    # print assign("www","https://blog.hacking8.com")
+    audit("47.52.234.181")
diff --git a/plugins/www/807.py b/plugins/www/807.py
index 98d4ec1..bed679c 100644
--- a/plugins/www/807.py
+++ b/plugins/www/807.py
@@ -16,7 +16,7 @@ def assign(service, arg):
 
 def audit(arg):
     payload = 'solr/#/'
     url = arg + payload
-    code, head, res, errcode, _ = curl.curl('"%s"' % url)
+    code, head, res, errcode, _ = curl.curl("%s" % url)
     if code == 200 and 'Apache SOLR' in res :
         security_info(url)
diff --git a/test.py b/test.py
index 916e24d..8c289b7 100644
--- a/test.py
+++ b/test.py
@@ -1,16 +1,9 @@
 # coding:utf-8
+import urlparse
+from dummy import *
 
 import urlparse
-u = "http://testphp.vulnweb.com/listproducts.php?artist=1&asfss=www"
-parse = urlparse.urlparse(u)
-print parse
-if not parse.query:
-    pass
-
-
-
+u = "https://bbs.125.la/?post=1"
 
-for i in parse.query.split('&'):
-    k,v = i.split('=')
-    print k,is_number(v)
+print urlparse.urlparse(u)
diff --git a/w9scan.py b/w9scan.py
index 41dacdd..a02c603 100644
--- a/w9scan.py
+++ b/w9scan.py
@@ -13,7 +13,7 @@
 from lib.core.common import banner
 from lib.core.log import logger
 import os
-import inspect
+import inspect,time
 from distutils.version import LooseVersion
 from lib.core.settings import VERSION
 from lib.core.data import urlconfig
@@ -78,13 +78,14 @@ def main():
         e.run()
         e.init_spider()
         s = crawler.SpiderMain(urlconfig.url)
+        time.sleep(0.5)
         s.craw()
         logger.report()
     except KeyboardInterrupt:
         logger.critical("[***] User Interrupt")
         exit()
     except Exception as info:
-        print "[xxx] MainError",info
+        print "[xxx] MainError",Exception,":",info
         exit()
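
For reference, the GET helper added to lib/utils/until.py above can be exercised on its own under Python 2.7 (the project's only stated requirement). A minimal standalone sketch follows; the explicit timeout, the name w9_get_sketch, and calling the taobao endpoint outside the plugin are assumptions for illustration, not part of the patch:

# -*- coding: utf-8 -*-
# Standalone sketch of the GET helper introduced in lib/utils/until.py.
# The timeout parameter is an assumption here; the patched w9_get() has none.
import urllib2

def w9_get_sketch(url, timeout=10):
    req = urllib2.Request(url)
    req.add_header('User-Agent',
                   'Mozilla/5.0 (Windows NT 6.2; rv:16.0) Gecko/20100101 Firefox/16.0')
    return urllib2.urlopen(req, timeout=timeout).read()

if __name__ == '__main__':
    # Mirrors how lib/utils/crawler.py now fetches pages (html = until.w9_get(new_url))
    # and the endpoint plugins/www/3338.py queries for IP geolocation.
    print w9_get_sketch('http://ip.taobao.com/service/getIpInfo.php?ip=8.8.8.8')[:200]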