Commit
Cleaned up some small details
boy-hack committed Jan 4, 2018
1 parent 51ba411 commit 9e91341
Showing 11 changed files with 44 additions and 26 deletions.
4 changes: 2 additions & 2 deletions README.MD
@@ -1,13 +1,13 @@
## w9scan
- This scanner will not only call bugscan plugins; the scanner will be upgraded continuously, drawing on the best scanner projects on GitHub, to build a `niub` (awesome) scanner! It only needs Python 2.7, requires no extra third-party libraries, and supports Windows/Linux. If this program helps you, please give it a `Star`; if you are not going to contribute, please do not click `Fork`.
+ This scanner will not only call bugscan plugins; it will be upgraded continuously, drawing on the best scanner projects on GitHub, to build a `niub` (awesome) scanner! It only needs Python 2.7, requires no extra third-party libraries, and supports Windows/Linux. If this program helps you, please give it a `Star`; if you are not going to contribute, please do not click `Fork`.

#### Goals
- Works on both Linux and Windows
- No extra third-party Python libraries to install
- Become the Top 1 open-source scanner

## Changelog
- - Up next: subdomain brute-forcing/probing folded into scans, a bug feedback mechanism, HTML-format scan reports, a scanner architecture overview, and a plugin-writing guide
+ - Up next: subdomain brute-forcing/probing folded into scans, an error-reporting mechanism, a bug feedback mechanism, HTML-format scan reports, a scanner architecture overview, and a plugin-writing guide

- 1.4.3 Added the WAF/CDN detection module `waf_identify.py`; thanks to [WebEye](https://github.com/zerokeeper/WebEye/) for the code and fingerprint data
- 1.4.2 Added a crawler-based backup-file detection module, modeled on [bcrpscan](https://github.com/secfree/bcrpscan)
5 changes: 3 additions & 2 deletions lib/core/common.py
@@ -1,12 +1,13 @@
#!/usr/bin/env python

#coding:utf-8
from lib.core.data import paths
import sys
import os
from lib.core.settings import INVALID_UNICODE_CHAR_FORMAT
from lib.core.settings import banner as banner1
from lib.core.log import logger
+ import urlparse
+ import urllib2
"""
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
@@ -111,4 +112,4 @@ def Get_lineNumber_fileName():
return '%s:%d <= %s:%d' % (f_co_name,
f_line,
ff_co_name,
- ff_line)
\ No newline at end of file
+ ff_line)
2 changes: 1 addition & 1 deletion lib/core/exploit.py
@@ -129,7 +129,7 @@ def _work(self,threadConf):
pluginObj.audit(threadConf["agrs"])

except Exception as error_info:
- self._print( "[!!!] ",threadConf["service"], threadConf["filename"],error_info)
+ self._print( "[!!!] service:%s filename:%s error_info:%s"%(threadConf["service"], threadConf["filename"],error_info))

def _security_note(self, body, uuid=None):
self.lock_output.acquire()
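The fix above turns four loosely comma-joined arguments into a single %-formatted message, so `_print` receives one string it can log atomically. A minimal sketch of the difference, with made-up plugin values:

```python
# Sketch of the formatting change; service/filename/error values are made up.
service, filename, error_info = "www", "3338.py", ValueError("bad response")

# Old shape: four separate positional arguments for _print to join somehow.
old_args = ("[!!!] ", service, filename, error_info)

# New shape: one pre-formatted message.
msg = "[!!!] service:%s filename:%s error_info:%s" % (service, filename, error_info)
print msg  # [!!!] service:www filename:3338.py error_info:bad response
```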
7 changes: 3 additions & 4 deletions lib/utils/crawler.py
@@ -6,6 +6,7 @@
import re
from thirdparty import hackhttp
from lib.core.data import w9_hash_pycode
+ from lib.utils import until

req = hackhttp.hackhttp()

@@ -50,13 +51,11 @@ def craw(self):
new_url = self.urls.get_new_url()
print("craw:" + new_url)
try:
- code, head, html, redirect_url, log = req.http(new_url)
+ html = until.w9_get(new_url)
check(new_url,html)
except Exception as errinfo:
print "[xxx] spider request error:",errinfo
- code = 0
html = ''
- if code != 200: continue
new_urls = self._parse(new_url, html)
self.urls.add_new_urls(new_urls)
self.deep = self.deep + 1
@@ -103,7 +102,7 @@ def _get_new_urls(self, page_url, links):
new_urls.add(new_full_url)
return new_urls

- def check(url,html):
+ def check(url,html = ''):
for k, v in w9_hash_pycode.iteritems():
try:
pluginObj = v["pluginObj"]
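Dropping the `if code != 200: continue` guard is consistent with the switch to `until.w9_get`: `urllib2` raises on HTTP error statuses rather than returning them, so non-200 responses already land in the `except` branch. A standalone illustration (the URL is hypothetical):

```python
# urllib2 signals HTTP failures with exceptions, not status-code returns.
import urllib2

try:
    html = urllib2.urlopen("http://example.com/missing-page").read()
except urllib2.HTTPError as e:   # any 4xx/5xx response
    print "[xxx] spider request error:", e.code, e.msg
except urllib2.URLError as e:    # DNS or connection failures
    print "[xxx] spider request error:", e.reason
```

`HTTPError` is a subclass of `URLError`, so it has to be caught first.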
6 changes: 6 additions & 0 deletions lib/utils/until.py
@@ -16,6 +16,12 @@
_unreserved_marks = "-_.!~*'()"
_safe_chars = urllib.always_safe + '%' + _reserved + _unreserved_marks

+ def w9_get(url):
+     # w9scan function for get requests
+     req = urllib2.Request(url)
+     req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.2; rv:16.0) Gecko/20100101 Firefox/16.0')
+     s = urllib2.urlopen(req).read()
+     return s

def is_ipaddr(varObj):
"""
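A minimal usage sketch for the new helper (the URL is an example only). Note that `w9_get` does not catch anything itself, which is why `crawler.py` wraps the call in `try/except`:

```python
# Usage sketch for until.w9_get; the target URL is illustrative.
from lib.utils import until

try:
    html = until.w9_get("http://example.com/")
    print "fetched %d bytes" % len(html)
except Exception as errinfo:  # urllib2 errors propagate out of w9_get
    print "[xxx] request error:", errinfo
```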
Binary file modified lib/utils/until.pyc
Binary file not shown.
3 changes: 2 additions & 1 deletion plugins/ssl/104.py
@@ -86,7 +86,8 @@ def assign(service, arg):


def audit(arg):
- host, port = arg
+ host = arg
+ port = 443
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host, port))
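The change above matters because `assign()` appears to hand `audit()` a bare hostname string; tuple-unpacking a string splits it into characters and raises. A tiny sketch of the failure mode, with a made-up host:

```python
# Why `host, port = arg` broke for a plain hostname string (host is made up).
arg = "example.com"
try:
    host, port = arg       # unpacks the string character by character, then fails
except ValueError as e:
    print "unpack error:", e  # too many values to unpack

host, port = arg, 443      # the fixed form, defaulting to the standard SSL port
```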
21 changes: 19 additions & 2 deletions plugins/www/3338.py
@@ -1,6 +1,11 @@
#!/usr/bin/evn python
# -*- coding: utf-8 -*-
import socket
+ import urlparse
+ import json
+ import sys
+ reload(sys)
+ sys.setdefaultencoding('utf-8')

def assign(service, arg):
if service == 'www':
@@ -9,8 +14,20 @@ def assign(service, arg):
return True, hostname

def audit(arg):
- security_info("IP:" + arg)
+ url = "http://ip.taobao.com/service/getIpInfo.php?ip=%s" % arg
+ s = util.w9_get(url)
+ jsondata = json.loads(s)
+ if jsondata['code'] == 1:
+     jsondata['data'] = {'region': '', 'city': '', 'isp': ''}
+ else:
+     security_info("Region:" + jsondata['data']['region'])
+     security_info("ISP:" + jsondata['data']['isp'])
+     security_info("City:" + jsondata['data']['city'])
+ security_info("IP Address:" + arg)
+ task_push("ip",arg)
+ # Get IP Address

if __name__ == "__main__":
- print assign("www","https://blog.hacking8.com")
from dummy import *
+ # print assign("www","https://blog.hacking8.com")
+ audit("47.52.234.181")
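For reference, a sketch of the JSON handling above with an illustrative payload; the field names (`code`, `data.region`, `data.city`, `data.isp`) come from the plugin, while the values are invented:

```python
# Illustrative parse of an ip.taobao.com-style payload; values are invented.
import json

s = '{"code": 0, "data": {"region": "Zhejiang", "city": "Hangzhou", "isp": "ChinaNet"}}'
jsondata = json.loads(s)
if jsondata['code'] == 1:  # the plugin treats code == 1 as a failed lookup
    jsondata['data'] = {'region': '', 'city': '', 'isp': ''}
else:
    print "Region:" + jsondata['data']['region']
    print "ISP:" + jsondata['data']['isp']
    print "City:" + jsondata['data']['city']
```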
2 changes: 1 addition & 1 deletion plugins/www/807.py
@@ -16,7 +16,7 @@ def assign(service, arg):
def audit(arg):
payload = 'solr/#/'
url = arg + payload
- code, head, res, errcode, _ = curl.curl('"%s"' % url)
+ code, head, res, errcode, _ = curl.curl("%s" % url)
if code == 200 and 'Apache SOLR' in res :
security_info(url)

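The one-character fix above removes literal double quotes that the old format string baked into the URL handed to `curl.curl`, which would corrupt the request target. A standalone sketch:

```python
# The old pattern embedded literal quotes in the URL string itself.
url = "http://example.com/solr/#/"   # example target
print '"%s"' % url   # "http://example.com/solr/#/"  <- quotes are part of the URL
print "%s" % url     # http://example.com/solr/#/
```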
15 changes: 4 additions & 11 deletions test.py
@@ -1,16 +1,9 @@
# coding:utf-8

- import urlparse
+ from dummy import *
+ import urlparse

- u = "http://testphp.vulnweb.com/listproducts.php?artist=1&asfss=www"
- parse = urlparse.urlparse(u)
- print parse
- if not parse.query:
-     pass



+ u = "https://bbs.125.la/?post=1"

- for i in parse.query.split('&'):
-     k,v = i.split('=')
-     print k,is_number(v)
+ print urlparse.urlparse(u)
5 changes: 3 additions & 2 deletions w9scan.py
@@ -13,7 +13,7 @@
from lib.core.common import banner
from lib.core.log import logger
import os
- import inspect
+ import inspect,time
from distutils.version import LooseVersion
from lib.core.settings import VERSION
from lib.core.data import urlconfig
@@ -78,13 +78,14 @@ def main():
e.run()
e.init_spider()
s = crawler.SpiderMain(urlconfig.url)
+ time.sleep(0.5)
s.craw()
logger.report()
except KeyboardInterrupt:
logger.critical("[***] User Interrupt")
exit()
except Exception as info:
- print "[xxx] MainError",info
+ print "[xxx] MainError",Exception,":",info
exit()


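One note on the revised error line: `Exception` there is the base class object, so the message always shows `<type 'exceptions.Exception'>` whatever was raised; `type(info)` would name the actual class. A quick sketch:

```python
# What the new print emits (sketch); `info` stands in for the caught error.
info = ValueError("boom")
print "[xxx] MainError", Exception, ":", info    # ... <type 'exceptions.Exception'> : boom
print "[xxx] MainError", type(info), ":", info   # ... <type 'exceptions.ValueError'> : boom
```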
