diff --git a/config.ini b/config.ini new file mode 100644 index 0000000..a31a6a6 --- /dev/null +++ b/config.ini @@ -0,0 +1,111 @@ +[Common] +method_crawl : Crawling +method_direct : Direct +method_search : Search +method_log : Log +banner_delay : 3.0 +con_timeout : 3.0 +date_format : %%Y/%%m/%%d %%H:%%M:%%S +log_path : logs +log_file : gyoithon.log +signature_path : signatures +module_path : modules + +[CloudChecker] +# The newest Azure IP range is following. +# https://www.microsoft.com/en-us/download/confirmation.aspx?id=41653 +aws_srv_name : Amazon Web Service +aws_ip_range : https://ip-ranges.amazonaws.com/ip-ranges.json +azure_srv_name : Microsoft Azure +azure_ip_range : https://download.microsoft.com/download/0/1/8/018E208D-54F8-44CD-AA26-CD7BC9524A8C/PublicIPs_20181112.xml +gcp_srv_name : Google Cloud Platform +gcp_nslookup_cmd : nslookup -q=TXT +gcp_content_srv : _cloud-netblocks.googleusercontent.com +gcp_content_ip : 8.8.8.8 +gcp_get_domain_regex : include:(_cloud-netblocks\d{1,3}\.googleusercontent\.com) +gcp_get_nwaddr_regex : ip4:(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}/\d{1,2}) + +[VersionChecker] +signature_file : signature_product.txt + +[VersionCheckerML] +category : CMS +train_path : train_data +trained_path : trained_data +train_os_in : train_os_in.txt +train_os_out : train_os_out.pkl +train_web_in : train_web_in.txt +train_web_out : train_web_out.pkl +train_framework_in : train_framework_in.txt +train_framework_out : train_framework_out.pkl +train_cms_in : train_cms_in.txt +train_cms_out : train_cms_out.pkl + +[CommentChecker] +signature_file : signature_comment.txt + +[ErrorChecker] +signature_file : signature_error.txt + +[PageChecker] +train_path : train_data +train_page : train_page_type.txt +trained_path : trained_data +trained_page : train_page_type.pkl +signature_file : signature_page_type_from_url.txt + +[GoogleHack] +api_key : your_api_key +search_engine_id : your_search_engine_id +signature_file : signature_search_query.txt +api_strict_key 
: Referer +api_strict_value : http://example.com +start_index : 1 +delay_time : 1.0 + +[ContentExplorer] +signature_file : signature_default_content.txt +delay_time : 1.0 + +[CveExplorerNVD] +con_timeout : 60.0 +max_cve_count : 3 +vuln_db_dir : vuln_db +nvd_name : vulns_nvd.csv +nvd_db_header : last_modified_date@data_type@problem_type@id@cvss_v2_score@cvss_v3_score@category@vendor_name@product_name@version_value@update_value@edition_value@description +nvd_year_name : vulns_*_nvd.csv +cve_years : 2002@2003@2004@2005@2006@2007@2008@2009@2010@2011@2012@2013@2014@2015@2016@2017@2018 +nvd_meta_url : https://nvd.nist.gov/feeds/json/cve/1.0/nvdcve-1.0-*.meta +nvd_zip_url : https://nvd.nist.gov/feeds/json/cve/1.0/nvdcve-1.0-*.json.zip +nvd_chk_date_regex : ^lastModifiedDate:(.*T.*)-.*[\r\n] +nvd_chk_hash_regex : ^sha256:(.*)[\r\n] +nvd_date_format : %%Y-%%m-%%dT%%H:%%M:%%S + +[Report] +report_path : report +report_name : gyoithon_report_*.csv +report_name_exploit : gyoithon_report.html +report_temp : report_temp.csv +template : report_template.html +header : fqdn@ip_addr@port@cloud_type@method@url@vendor_name@prod_name@prod_version@prod_trigger@prod_type@prod_vuln@origin_login@origin_login_trigger@wrong_comment@error_msg@server_header@log@date + +[Spider] +output_base_path : crawl_result +output_filename : _crawl_result.json +depth_limit : 2 +delay_time : 3.0 + +[Exploit] +server_host : 192.168.220.150 +server_port : 55553 +msgrpc_user : test +msgrpc_pass : test1234 +timeout : 10 +LHOST : 192.168.220.150 +LPORT : 4444 +data_path : data +conversion_table : conversion_table.csv + +[Censys] +api_id : your_api_id +secret : your_secret_key diff --git a/gyoithon.py b/gyoithon.py index 75d81aa..2a84e6d 100644 --- a/gyoithon.py +++ b/gyoithon.py @@ -1,17 +1,29 @@ #!/usr/bin/env python # -*- coding:utf-8 -*- import os -import re -import ipaddress import codecs import time -import pandas as pd +import glob +import configparser import urllib3 +from docopt import docopt from urllib3 
import util -from classifier4gyoithon.GyoiClassifier import DeepClassifier -from classifier4gyoithon.GyoiExploit import Metasploit -from classifier4gyoithon.GyoiReport import CreateReport from util import Utilty +from modules.Gyoi_CloudChecker import CloudChecker +from modules.Gyoi_VersionChecker import VersionChecker +from modules.Gyoi_VersionCheckerML import VersionCheckerML +from modules.Gyoi_CommentChecker import CommentChecker +from modules.Gyoi_ErrorChecker import ErrorChecker +from modules.Gyoi_Report import CreateReport +from modules.Gyoi_PageTypeChecker import PageChecker +from modules.Gyoi_GoogleHack import GoogleCustomSearch +from modules.Gyoi_ContentExplorer import ContentExplorer +from modules.Gyoi_SpiderControl import SpiderControl +from modules.Gyoi_CveExplorerNVD import CveExplorerNVD +from modules.Gyoi_Exploit import Exploit +from modules.Gyoi_Censys import Censys +from urllib3.exceptions import InsecureRequestWarning +urllib3.disable_warnings(InsecureRequestWarning) # Type of printing. OK = 'ok' # [*] @@ -21,183 +33,11 @@ NONE = 'none' # No label. -# Identify product name using signature. 
-def identify_product(categoy, target_url, response, utility): - product_list = [] - reason_list = [] - full_path = os.path.dirname(os.path.abspath(__file__)) - file_name = 'signature_' + categoy + '.txt' - try: - with codecs.open(os.path.join(full_path + '/signatures/', file_name), 'r', 'utf-8') as fin: - matching_patterns = fin.readlines() - for pattern in matching_patterns: - items = pattern.replace('\r', '').replace('\n', '').split('@') - keyword_list = [] - product = items[0] - signature = items[1] - list_match = re.findall(signature, response, flags=re.IGNORECASE) - if len(list_match) != 0: - # Output result (header) - keyword_list.append(list_match) - utility.print_message(OK, 'category : {}'.format(categoy)) - utility.print_message(OK, 'product : {}'.format(product)) - utility.print_message(OK, 'reason : {}'.format(keyword_list)) - utility.print_message(OK, 'target url : {}'.format(target_url)) - utility.print_message(NONE, '-' * 42) - product_list.append(product) - reason_list.append(keyword_list) - except Exception as err: - utility.print_exception(err, '{}'.format(err)) - return product_list, reason_list - - -# Classifier product name using signatures. 
-def classifier_signature(ip_addr, port, target_url, response, log_file, utility): - utility.print_message(NOTE, 'Analyzing gathered HTTP response using Signature.') - ip_list = [] - port_list = [] - vhost_list = [] - judge_list = [] - version_list = [] - reason_list = [] - scan_type_list = [] - ua_list = [] - http_ver_list = [] - ssl_list = [] - sni_list = [] - url_list = [] - log_list = [] - product_list = [] - for category in ['os', 'web', 'framework', 'cms']: - products, keywords = identify_product(category, target_url, response, utility) - for product, keyword in zip(products, keywords): - ip_list.append(ip_addr) - port_list.append(port) - vhost_list.append(ip_addr) - judge_list.append(category + ':' + str(product)) - version_list.append('-') - reason_list.append(keyword) - scan_type_list.append('[ip]') - ua_list.append('-') - http_ver_list.append('HTTP/1.1') - ssl_list.append('-') - sni_list.append('-') - url_list.append(target_url) - log_list.append(log_file) - product_list.append(product) - - if len(product_list) == 0: - utility.print_message(WARNING, 'Product Not Found.') - return [] - - # logging. 
- series_ip = pd.Series(ip_list) - series_port = pd.Series(port_list) - series_vhost = pd.Series(vhost_list) - series_judge = pd.Series(judge_list) - series_version = pd.Series(version_list) - series_reason = pd.Series(reason_list) - series_scan_type = pd.Series(scan_type_list) - series_ua = pd.Series(ua_list) - series_http_ver = pd.Series(http_ver_list) - series_ssl = pd.Series(ssl_list) - series_sni = pd.Series(sni_list) - series_url = pd.Series(url_list) - series_log = pd.Series(log_list) - df = pd.DataFrame({'ip': series_ip, - 'port': series_port, - 'vhost': series_vhost, - 'judge': series_judge, - 'judge_version': series_version, - 'reason': series_reason, - 'scantype': series_scan_type, - 'ua': series_ua, - 'version': series_http_ver, - 'ssl': series_ssl, - 'sni': series_sni, - 'url': series_url, - 'log': series_log}, - columns=['ip', 'port', 'vhost', 'judge', 'judge_version', 'reason', - 'scantype', 'ua', 'version', 'ssl', 'sni', 'url', 'log']) - saved_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'gyoithon') - df.sort_values(by='port', ascending=False).to_csv(os.path.join(saved_path, 'webconf.csv'), - mode='a', - header=False, - index=False) - return product_list - - -# Create webconf.csv -def create_webconf(ip_addr, port, log_file): - utility.print_message(NOTE, 'Create "webconf.csv".') - series_ip = pd.Series([ip_addr]) - series_port = pd.Series([str(port)]) - series_vhost = pd.Series([ip_addr]) - series_judge = pd.Series(['-']) - series_version = pd.Series(['-']) - series_reason = pd.Series(['-']) - series_scan_type = pd.Series(['-']) - series_ua = pd.Series(['-']) - series_http_ver = pd.Series(['-']) - series_ssl = pd.Series(['-']) - series_sni = pd.Series(['-']) - series_url = pd.Series(['-']) - series_log = pd.Series([log_file]) - df = pd.DataFrame({'ip': series_ip, - 'port': series_port, - 'vhost': series_vhost, - 'judge': series_judge, - 'judge_version': series_version, - 'reason': series_reason, - 'scantype': series_scan_type, - 
'ua': series_ua, - 'version': series_http_ver, - 'ssl': series_ssl, - 'sni': series_sni, - 'url': series_url, - 'log': series_log}, - columns=['ip', 'port', 'vhost', 'judge', 'judge_version', 'reason', - 'scantype', 'ua', 'version', 'ssl', 'sni', 'url', 'log']) - saved_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'gyoithon') - df.sort_values(by='port', ascending=False).to_csv(os.path.join(saved_path, 'webconf.csv'), index=False) - - -# Check IP address format. -def is_valid_ip(arg): - try: - ipaddress.ip_address(arg) - return True - except ValueError: - return False - - -# Check argument values. -def check_arg_value(ip_addr, port, path, utility): - # Check IP address. - if is_valid_ip(ip_addr) is False: - utility.print_message(FAIL, 'Invalid IP address: {}'.format(ip_addr)) - return False - - # Check port number. - if port.isdigit() is False: - utility.print_message(FAIL, 'Invalid port number: {}'.format(port)) - return False - elif (int(port) < 1) or (int(port) > 65535): - utility.print_message(FAIL, 'Invalid port number: {}'.format(port)) - return False - - # Check path. - if isinstance(path, str) is False and isinstance(path, int) is False: - utility.print_message(FAIL, 'Invalid path: {}'.format(path)) - return False - - return True - - # Get target information. 
-def get_target_info(utility): - full_path = os.path.dirname(os.path.abspath(__file__)) - ip_addr = [] +def get_target_info(full_path, utility): + utility.write_log(20, '[In] Get target information [{}].'.format(os.path.basename(__file__))) + protocol = [] + fqdn = [] port = [] path = [] try: @@ -205,113 +45,305 @@ def get_target_info(utility): targets = fin.readlines() for target in targets: items = target.replace('\r', '').replace('\n', '').split(' ') - ip_addr.append(items[0]) - port.append(items[1]) - path.append(items[2]) - except Exception as err: - utility.print_message(FAIL, 'Invalid file: {}'.format(err)) + if len(items) != 4: + utility.print_message(FAIL, 'Invalid target record : {}'.format(target)) + utility.write_log(30, 'Invalid target record : {}'.format(target)) + continue + protocol.append(items[0]) + fqdn.append(items[1]) + port.append(items[2]) + path.append(items[3]) + except Exception as e: + utility.print_message(FAIL, 'Invalid file: {}'.format(e)) + utility.write_log(30, 'Invalid file: {}'.format(e)) - return ip_addr, port, path + utility.write_log(20, '[Out] Get target information [{}].'.format(os.path.basename(__file__))) + return protocol, fqdn, port, path # Display banner. 
def show_banner(utility): banner = """ -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ██████╗██╗ ██╗ ██████╗ ██╗████████╗██╗ ██╗ ██████╗ ███╗ ██╗ ██╔════╝╚██╗ ██╔╝██╔═══██╗██║╚══██╔══╝██║ ██║██╔═══██╗████╗ ██║ ██║ ███╗╚████╔╝ ██║ ██║██║ ██║ ███████║██║ ██║██╔██╗ ██║ ██║ ██║ ╚██╔╝ ██║ ██║██║ ██║ ██╔══██║██║ ██║██║╚██╗██║ ╚██████╔╝ ██║ ╚██████╔╝██║ ██║ ██║ ██║╚██████╔╝██║ ╚████║ ╚═════╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝ (beta) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ """ + 'by ' + os.path.basename(__file__) utility.print_message(NONE, banner) show_credit(utility) - time.sleep(3.0) + time.sleep(utility.banner_delay) # Show credit. def show_credit(utility): credit = u""" - =[ GyoiThon v0.0.1-beta ]= + =[ GyoiThon v0.0.2-beta ]= + -- --=[ Author : Gyoiler (@gyoithon) ]=-- + -- --=[ Website : https://github.com/gyoisamurai/GyoiThon/ ]=-- """ utility.print_message(NONE, credit) +# Define command option. +__doc__ = """{f} +usage: + {f} [-m] [-g] [-e] [-c] [-p] [-l ] + {f} -h | --help +options: + -m Optional : Analyze HTTP response for identify product/version using Machine Learning. + -g Optional : Google Custom Search for identify product/version. + -e Optional : Explore default path of product. + -c Optional : Discover open ports and wrong ssl server certification using Censys. + -p Optional : Execute exploit module using Metasploit. + -l Optional : Analyze log based HTTP response for identify product/version. + -h --help Show this help message and exit. +""".format(f=__file__) + + +# Parse command arguments. 
+def command_parse(utility): + utility.write_log(20, '[In] Parse command options [{}].'.format(os.path.basename(__file__))) + + args = docopt(__doc__) + opt_ml = args['-m'] + opt_gcs = args['-g'] + opt_explore = args['-e'] + opt_censys = args['-c'] + opt_exploit = args['-p'] + opt_log = args['-l'] + opt_log_path = args[''] + + utility.write_log(20, '[Out] Parse command options [{}].'.format(os.path.basename(__file__))) + return opt_ml, opt_gcs, opt_explore, opt_censys, opt_exploit, opt_log, opt_log_path + + # main. if __name__ == '__main__': + file_name = os.path.basename(__file__) + full_path = os.path.dirname(os.path.abspath(__file__)) + utility = Utilty() - show_banner(utility) + utility.write_log(20, '[In] GyoiThon [{}].'.format(file_name)) - # Get target information. - ip_list, port_list, path_list = get_target_info(utility) + # Get command arguments. + opt_ml, opt_gcs, opt_explore, opt_censys, opt_exploit, opt_log, opt_log_path = command_parse(utility) - # Check parameters. - product_list = [] - full_path = os.path.dirname(os.path.abspath(__file__)) - for idx in range(len(ip_list)): - if check_arg_value(ip_list[idx], port_list[idx], path_list[idx], utility) is False: - utility.print_message(FAIL, 'Invalid parameter: {}, {}, {}'.format(ip_list[idx], - port_list[idx], - path_list[idx])) - - # Start Spider. - scheme = ['http', 'https'] - web_target_info = utility.run_spider(scheme, ip_list[idx], port_list[idx], path_list[idx]) - - # Get HTTP responses. - log_file = os.path.join(full_path + '/gyoithon/', 'get_' + ip_list[idx] + '_' + str(port_list[idx]) + '_ip.log') - create_webconf(ip_list[idx], port_list[idx], log_file) - for target in web_target_info: - for target_url in target[2]: - # Check target url. - parsed = None - try: - parsed = util.parse_url(target_url) - except Exception as err: - utility.print_exception(err, 'Parsed error: {}'.format(target_url)) - continue + # Read config.ini. 
+ config = configparser.ConfigParser() + config.read(os.path.join(full_path, 'config.ini')) + + # Common setting value. + log_path = '' + method_crawl = '' + method_log = '' + try: + log_dir = config['Common']['log_path'] + log_path = os.path.join(full_path, log_dir) + method_crawl = config['Common']['method_crawl'] + method_log = config['Common']['method_log'] + except Exception as e: + msg = 'Reading config.ini is failure : {}'.format(e) + utility.print_exception(e, msg) + utility.write_log(40, msg) + utility.write_log(20, '[Out] GyoiThon [{}].'.format(file_name)) + exit(1) + + # Show banner. + show_banner(utility) - # Get HTTP response (header + body). - response = '' - http = urllib3.PoolManager(timeout=utility.http_timeout) - try: - utility.print_message(OK, '{} {}'.format(utility.get_current_date('%Y-%m-%d %H:%M:%S'), - target_url)) - res = http.request('GET', target_url) - for header in res.headers.items(): - response += header[0] + ': ' + header[1] + '\r\n' - response += '\r\n\r\n' + res.data.decode('utf-8') + # Create instances. + cloud_checker = CloudChecker(utility) + version_checker = VersionChecker(utility) + version_checker_ml = VersionCheckerML(utility) + comment_checker = CommentChecker(utility) + error_checker = ErrorChecker(utility) + page_checker = PageChecker(utility) + google_hack = GoogleCustomSearch(utility) + content_explorer = ContentExplorer(utility) + spider = SpiderControl(utility) + report = CreateReport(utility) + cve_explorer = CveExplorerNVD(utility) + censys = Censys(utility) + + # Get target information from "host.txt". + protocol_list, fqdn_list, port_list, path_list = get_target_info(full_path, utility) + + # Start investigation. + for idx in range(len(fqdn_list)): + # Check parameters. 
+ msg = 'investigation : {}, {}, {}, {}'.format(protocol_list[idx], fqdn_list[idx], port_list[idx], path_list[idx]) + utility.write_log(20, 'Start ' + msg) + if utility.check_arg_value(protocol_list[idx], fqdn_list[idx], port_list[idx], path_list[idx]) is False: + msg = 'Invalid parameter : {}, {}, {}, {}'.format(protocol_list[idx], fqdn_list[idx], + port_list[idx], path_list[idx]) + utility.print_message(FAIL, msg) + utility.write_log(30, msg) + continue + + # Create report header. + report.create_report_header(fqdn_list[idx], path_list[idx].replace('/', '')) + + # Check cloud service. + cloud_type = cloud_checker.get_cloud_service(fqdn_list[idx]) + + # Search Censys. + if opt_censys: + censys.search_censys(protocol_list[idx], utility.forward_lookup(fqdn_list[idx]), fqdn_list[idx]) + + # Analysis HTTP responses. + product_list = [] + if opt_log: + # Check stored logs. + if os.path.exists(opt_log_path) is False: + utility.print_message(FAIL, 'Path not found: {}'.format(opt_log_path)) + utility.write_log(30, 'Path not found : {}'.format(opt_log_path)) + utility.write_log(20, '[Out] Analyze log [{}].'.format(os.path.basename(__file__))) + else: + log_list = glob.glob(os.path.join(opt_log_path, '*.log')) + for log_idx, path in enumerate(log_list): + try: + with codecs.open(path, 'r', 'utf-8') as fin: + target_log = fin.read() + date = utility.get_current_date('%Y%m%d%H%M%S%f')[:-3] + print_date = utility.transform_date_string( + utility.transform_date_object(date[:-3], '%Y%m%d%H%M%S')) + + msg = '{}/{} Checking : Log: {}'.format(log_idx + 1, len(log_list), path) + utility.print_message(OK, msg) + utility.write_log(20, msg) + + # Check product name/version using signature. + product_list = version_checker.get_product_name(target_log) + + # Check product name/version using Machine Learning. + if opt_ml: + product_list.extend(version_checker_ml.get_product_name(target_log)) + + # Get CVE for products. 
+ product_list = cve_explorer.cve_explorer(product_list) + + # Check unnecessary comments. + comments = comment_checker.get_bad_comment(target_log) + + # Check unnecessary error messages. + errors = error_checker.get_error_message(target_log) + + # Create report. + report.create_report_body('-', + fqdn_list[idx], + path_list[idx].replace('/', ''), + port_list[idx], + cloud_type, + method_log, + product_list, + {}, + comments, + errors, + '-', + path, + print_date) + except Exception as e: + utility.print_exception(e, 'Not read log : {}'.format(path)) + utility.write_log(30, 'Not read log : {}'.format(path)) + else: + # Gather target url using Spider. + web_target_info = spider.run_spider(protocol_list[idx], fqdn_list[idx], port_list[idx], path_list[idx]) + + # Get HTTP responses. + for target in web_target_info: + for count, target_url in enumerate(target[2]): + utility.print_message(NOTE, '{}/{} Start analyzing: {}'.format(count+1, len(target[2]), target_url)) + + # Check target url. + parsed = None + try: + parsed = util.parse_url(target_url) + except Exception as e: + utility.print_exception(e, 'Parsed error : {}'.format(target_url)) + utility.write_log(30, 'Parsed error : {}'.format(target_url)) + continue + + # Get HTTP response (header + body). + date = utility.get_current_date('%Y%m%d%H%M%S%f')[:-3] + print_date = utility.transform_date_string(utility.transform_date_object(date[:-3], '%Y%m%d%H%M%S')) + _, server_header, res_header, res_body = utility.send_request('GET', target_url) # Write log. 
+ log_name = protocol_list[idx] + '_' + fqdn_list[idx] + '_' + str(port_list[idx]) + '_' + date + '.log' + log_path_fqdn = os.path.join(log_path, fqdn_list[idx] + '_' + path_list[idx].replace('/', '')) + if os.path.exists(log_path_fqdn) is False: + os.mkdir(log_path_fqdn) + log_file = os.path.join(log_path_fqdn, log_name) with codecs.open(log_file, 'w', 'utf-8') as fout: - fout.write(response) - except Exception as err: - utility.print_exception(err, 'Target URL: {}'.format(target_url)) - continue + fout.write(target_url + '\n\n' + res_header + res_body) + + # Check product name/version using signature. + product_list = version_checker.get_product_name(res_header + res_body) + + # Check product name/version using Machine Learning. + if opt_ml: + product_list.extend(version_checker_ml.get_product_name(res_header + res_body)) + + # Get CVE for products. + product_list = cve_explorer.cve_explorer(product_list) + + # Check unnecessary comments. + comments = comment_checker.get_bad_comment(res_body) + + # Check unnecessary error messages. + errors = error_checker.get_error_message(res_body) + + # Check login page. + page_type = page_checker.judge_page_type(target_url, res_body) + + # Create report. + report.create_report_body(target_url, + fqdn_list[idx], + path_list[idx].replace('/', ''), + port_list[idx], + cloud_type, + method_crawl, + product_list, + page_type, + comments, + errors, + server_header, + log_file, + print_date) + + # Check unnecessary contents using Google Hack. + if opt_gcs: + product_list = google_hack.execute_google_hack(cve_explorer, + fqdn_list[idx], + path_list[idx].replace('/', ''), + report) + + # Check unnecessary contents using Explore contents. + if opt_explore: + product_list.extend(content_explorer.content_explorer(cve_explorer, + protocol_list[idx], + fqdn_list[idx], + path_list[idx].replace('/', ''), + port_list[idx], + path_list[idx], + report)) + + # Execute exploitation. 
+ if opt_exploit: + exploit = Exploit(utility) + exploit_product = list(map(list, set(map(tuple, [[products[1], products[2]] for products in product_list])))) + exploit.exploit({'ip': utility.forward_lookup(fqdn_list[idx]), + 'port': int(port_list[idx]), + 'prod_list': exploit_product}) + + # Create exploiting report. + report.create_exploit_report() + + utility.write_log(20, 'End ' + msg) - # Judge product name using string matching. - products = classifier_signature(ip_list[idx], port_list[idx], target_url, response, log_file, utility) - for product in products: - product_list.append(product) - - # Classifier using Machine Learning. - classifier = DeepClassifier() - products = classifier.analyzer(ip_list[idx], int(port_list[idx]), ip_list[idx], False, target_url) - for product in products: - product_list.append(product) - time.sleep(0.5) - - # Exploit using Metasploit. - product_list = list(set(product_list)) - for product in product_list: - metasploit = Metasploit() - metasploit.exploit({'ip': ip_list[idx], 'port': int(port_list[idx]), 'prod_name': product}) - - # Create Report. - report = CreateReport() - report.create_report() print(os.path.basename(__file__) + ' finish!!') + utility.write_log(20, '[Out] GyoiThon [{}].'.format(file_name)) diff --git a/host.txt b/host.txt index 4cc54ff..0b33084 100644 --- a/host.txt +++ b/host.txt @@ -1,2 +1,4 @@ -192.168.220.148 80 /tikiwiki/ -192.168.220.148 80 /oscommerce/catalog/ +http 192.168.220.129 80 /cyclone/ +http 192.168.220.129 80 /WackoPicko/ +http 192.168.220.129 80 /bodgeit/ +http 192.168.220.129 80 /vicnum/ diff --git a/logs/gyoithon.log b/logs/gyoithon.log new file mode 100644 index 0000000..47e812d --- /dev/null +++ b/logs/gyoithon.log @@ -0,0 +1 @@ +INFO,2018/11/06 07:43:12 [In] GyoiThon [gyoithon.py]. 
diff --git a/modules/Gyoi_Censys.py b/modules/Gyoi_Censys.py new file mode 100644 index 0000000..4e1cfed --- /dev/null +++ b/modules/Gyoi_Censys.py @@ -0,0 +1,51 @@ +#!/bin/env python +# -*- coding: utf-8 -*- +import os +import sys +import configparser +import censys +from censys import * + +# Type of printing. +OK = 'ok' # [*] +NOTE = 'note' # [+] +FAIL = 'fail' # [-] +WARNING = 'warn' # [!] +NONE = 'none' # No label. + + +class Censys: + def __init__(self, utility): + # Read config.ini. + self.utility = utility + config = configparser.ConfigParser() + self.file_name = os.path.basename(__file__) + self.full_path = os.path.dirname(os.path.abspath(__file__)) + self.root_path = os.path.join(self.full_path, '../') + config.read(os.path.join(self.root_path, 'config.ini')) + + try: + self.api_id = config['Censys']['api_id'] + self.secret = config['Censys']['secret'] + except Exception as e: + self.utility.print_message(FAIL, 'Reading config.ini is failure : {}'.format(e)) + self.utility.write_log(40, 'Reading config.ini is failure : {}'.format(e)) + sys.exit(1) + + # Control censys. + def search_censys(self, protocol, ip_addr, fqdn): + self.utility.print_message(NOTE, 'Search Censys.') + self.utility.write_log(20, '[In] Search Censys [{}].'.format(self.file_name)) + + api = censys.ipv4.CensysIPv4(api_id=self.api_id, api_secret=self.secret) + for result in api.search('ip:{}'.format(ip_addr)): + self.utility.print_message(OK, 'Open web ports: {}'.format(result['protocols'])) + + # Check cloud service name. 
+ if protocol == 'https': + api = censys.certificates.CensysCertificates(api_id=self.api_id, api_secret=self.secret) + fields = ["parsed.subject_dn", "parsed.fingerprint_sha256"] + for cert in api.search('tags: trusted and parsed.names: {}'.format(fqdn), fields=fields): + self.utility.print_message(OK, 'Certification info: {}'.format(cert)) + + self.utility.write_log(20, '[Out] Search Censys [{}].'.format(self.file_name)) diff --git a/modules/Gyoi_CloudChecker.py b/modules/Gyoi_CloudChecker.py new file mode 100644 index 0000000..fe661cb --- /dev/null +++ b/modules/Gyoi_CloudChecker.py @@ -0,0 +1,188 @@ +#!/bin/env python +# -*- coding: utf-8 -*- +import os +import sys +import re +import json +import urllib3 +import ipaddress +import subprocess +import configparser +from bs4 import BeautifulSoup +from urllib3.exceptions import InsecureRequestWarning +urllib3.disable_warnings(InsecureRequestWarning) + +# Type of printing. +OK = 'ok' # [*] +NOTE = 'note' # [+] +FAIL = 'fail' # [-] +WARNING = 'warn' # [!] +NONE = 'none' # No label. + + +class CloudChecker: + def __init__(self, utility): + # Read config.ini. 
+ self.utility = utility + config = configparser.ConfigParser() + self.file_name = os.path.basename(__file__) + self.full_path = os.path.dirname(os.path.abspath(__file__)) + self.root_path = os.path.join(self.full_path, '../') + config.read(os.path.join(self.root_path, 'config.ini')) + + try: + self.aws_srv_name = config['CloudChecker']['aws_srv_name'] + self.aws_ip_range = config['CloudChecker']['aws_ip_range'] + self.azure_srv_name = config['CloudChecker']['azure_srv_name'] + self.azure_ip_range = config['CloudChecker']['azure_ip_range'] + self.gcp_srv_name = config['CloudChecker']['gcp_srv_name'] + self.gcp_nslookup_cmd = config['CloudChecker']['gcp_nslookup_cmd'] + self.gcp_content_srv = config['CloudChecker']['gcp_content_srv'] + self.gcp_content_ip = config['CloudChecker']['gcp_content_ip'] + self.gcp_get_domain_regex = config['CloudChecker']['gcp_get_domain_regex'] + self.gcp_get_nwaddr_regex = config['CloudChecker']['gcp_get_nwaddr_regex'] + except Exception as e: + self.utility.print_message(FAIL, 'Reading config.ini is failure : {}'.format(e)) + self.utility.write_log(40, 'Reading config.ini is failure : {}'.format(e)) + sys.exit(1) + + # Check AWS. + def check_aws(self, ip_addr): + self.utility.print_message(NOTE, 'Check AWS IP range.') + self.utility.write_log(20, '[In] Check AWS IP range [{}].'.format(self.file_name)) + + # Get IP range list. + self.utility.write_log(20, 'Accessing : {}'.format(self.aws_ip_range)) + http = urllib3.PoolManager(timeout=self.utility.con_timeout) + res = http.request('GET', self.aws_ip_range) + aws_nw_addres = json.loads(res.data.decode('utf-8'))['prefixes'] + + # Check all aws ip_address. 
+ target_ip = ipaddress.ip_address(ip_addr) + for aws_nw_addr in aws_nw_addres: + if target_ip in ipaddress.ip_network(aws_nw_addr['ip_prefix']): + msg = 'Detect : service=AWS target={} prefix={} region={} service={}'.format(target_ip, + aws_nw_addr['ip_prefix'], + aws_nw_addr['region'], + aws_nw_addr['service']) + self.utility.print_message(OK, msg) + self.utility.write_log(20, msg) + self.utility.write_log(20, '[Out] Check AWS IP range [{}].'.format(self.file_name)) + return True + else: + self.utility.print_message(FAIL, 'Not include : service=AWS target={} prefix={}' + .format(target_ip, aws_nw_addr['ip_prefix'])) + self.utility.write_log(20, '[Out] Check AWS IP range [{}].'.format(self.file_name)) + return False + + # Check Azure. + def check_azure(self, ip_addr): + self.utility.print_message(NOTE, 'Check Azure IP range.') + self.utility.write_log(20, '[In] Check Azure IP range [{}].'.format(self.file_name)) + + # Get IP range list. + self.utility.write_log(20, 'Accessing : {}'.format(self.azure_ip_range)) + http = urllib3.PoolManager(timeout=self.utility.con_timeout) + res = http.request('GET', self.azure_ip_range) + soup = BeautifulSoup(res.data.decode('utf-8').lower(), 'lxml') + regions = soup.find_all('region') + + # Check all azure ip_address. 
+        # (tail of check_azure; its "def" line is above this window)
+        # Compare the target address against every subnet advertised for each Azure region.
+        target_ip = ipaddress.ip_address(ip_addr)
+        for idx, region in enumerate(regions):
+            azure_nw_addres = []
+            region_name = region.attrs['name']
+            for content in region.contents:
+                if content.name == 'iprange':
+                    azure_nw_addres.append(content['subnet'])
+
+            for azure_nw_addr in azure_nw_addres:
+                if target_ip in ipaddress.ip_network(azure_nw_addr):
+                    msg = 'Detect : service=Azure target={} prefix={} region={}'.format(target_ip, azure_nw_addr, region_name)
+                    self.utility.print_message(OK, msg)
+                    self.utility.write_log(20, msg)
+                    self.utility.write_log(20, '[Out] Check Azure IP range [{}].'.format(self.file_name))
+                    return True
+                else:
+                    # Per-prefix miss is only reported, the scan continues.
+                    self.utility.print_message(FAIL, 'Not include : service=Azure target={} prefix={}'
+                                               .format(target_ip, azure_nw_addr))
+        # No region matched.
+        self.utility.write_log(20, '[Out] Check Azure IP range [{}].'.format(self.file_name))
+        return False
+
+    # Check GCP.
+    def check_gcp(self, ip_addr):
+        """Return True if ip_addr is inside a GCP netblock advertised via SPF TXT records.
+
+        Netblocks are discovered by running nslookup (via the shell) against the
+        _cloud-netblocks SPF record, then resolving each included sub-domain.
+        """
+        self.utility.print_message(NOTE, 'Check GCP IP range.')
+        self.utility.write_log(20, '[In] Check GCP IP range [{}].'.format(self.file_name))
+
+        # Get Domain in SPF record using nslookup command.
+        raw_domains = ''
+        nslookup_cmd = self.gcp_nslookup_cmd + ' ' + self.gcp_content_srv + ' ' + self.gcp_content_ip
+        try:
+            self.utility.write_log(20, 'Execute : {}'.format(nslookup_cmd))
+            # NOTE(review): shell=True with a command assembled from config.ini values;
+            # acceptable for a local tool but confirm the config source is trusted.
+            raw_domains = subprocess.check_output(nslookup_cmd, shell=True)
+        except Exception as e:
+            msg = 'Executing {} is failure.'.format(nslookup_cmd)
+            self.utility.print_exception(e, msg)
+            self.utility.write_log(30, msg)
+            self.utility.write_log(20, '[Out] Check GCP IP range [{}].'.format(self.file_name))
+            return False
+
+        # Set character code.
+        # nslookup output is decoded as Shift-JIS on Windows, UTF-8 elsewhere
+        # (assumes a Japanese-locale Windows console — TODO confirm).
+        char_code = ''
+        if os.name == 'nt':
+            char_code = 'shift-jis'
+        else:
+            char_code = 'utf-8'
+
+        # Get Network addresses from each domain.
+        gcp_domain_list = re.findall(self.gcp_get_domain_regex, raw_domains.decode(char_code))
+        gcp_nw_addres = []
+        for gcp_domain in gcp_domain_list:
+            nslookup_cmd = self.gcp_nslookup_cmd + ' ' + gcp_domain + ' ' + self.gcp_content_ip
+            try:
+                self.utility.write_log(20, 'Execute : {}'.format(nslookup_cmd))
+                raw_ip = subprocess.check_output(nslookup_cmd, shell=True)
+            except Exception as e:
+                # Best-effort: one failed domain lookup must not abort the whole check.
+                msg = 'Executing {} is failure.'.format(nslookup_cmd)
+                self.utility.print_exception(e, msg)
+                self.utility.write_log(30, msg)
+                continue
+
+            gcp_nwaddres_from_one_domain = re.findall(self.gcp_get_nwaddr_regex, raw_ip.decode(char_code))
+            for nwaddr in gcp_nwaddres_from_one_domain:
+                gcp_nw_addres.append(nwaddr)
+
+        # Check all gcp ip_address.
+        target_ip = ipaddress.ip_address(ip_addr)
+        for gcp_nw_addr in gcp_nw_addres:
+            if target_ip in ipaddress.ip_network(gcp_nw_addr):
+                msg = 'Detect : service=GCP target={} prefix={}'.format(target_ip, gcp_nw_addr)
+                self.utility.print_message(OK, msg)
+                self.utility.write_log(20, msg)
+                self.utility.write_log(20, '[Out] Check GCP IP range [{}].'.format(self.file_name))
+                return True
+            else:
+                self.utility.print_message(FAIL, 'Not include : service=GCP target={} prefix={}'
+                                           .format(target_ip, gcp_nw_addr))
+        # No prefix matched.
+        self.utility.write_log(20, '[Out] Check GCP IP range [{}].'.format(self.file_name))
+        return False
+
+    # Identify cloud service name.
+    def get_cloud_service(self, fqdn):
+        """Resolve fqdn to an IP and report which provider owns it.
+
+        Returns one of the configured service names (AWS / Azure / GCP)
+        or the literal string 'Unknown'. Providers are tried in fixed order.
+        """
+        self.utility.print_message(NOTE, 'Analyze cloud service.')
+        self.utility.write_log(20, '[In] Analyze cloud service [{}].'.format(self.file_name))
+        target_ip = self.utility.forward_lookup(fqdn)
+
+        # Check cloud service name.
+        if self.check_aws(target_ip) is True:
+            self.utility.write_log(20, '[Out] Analyze cloud service [{}].'.format(self.file_name))
+            return self.aws_srv_name
+        elif self.check_azure(target_ip) is True:
+            self.utility.write_log(20, '[Out] Analyze cloud service [{}].'.format(self.file_name))
+            return self.azure_srv_name
+        elif self.check_gcp(target_ip) is True:
+            self.utility.write_log(20, '[Out] Analyze cloud service [{}].'.format(self.file_name))
+            return self.gcp_srv_name
+        else:
+            self.utility.write_log(20, '[Out] Analyze cloud service [{}].'.format(self.file_name))
+            return 'Unknown'
diff --git a/modules/Gyoi_CommentChecker.py b/modules/Gyoi_CommentChecker.py
new file mode 100644
index 0000000..4d0d50b
--- /dev/null
+++ b/modules/Gyoi_CommentChecker.py
@@ -0,0 +1,89 @@
+#!/bin/env python
+# -*- coding: utf-8 -*-
+import os
+import sys
+import codecs
+import re
+import configparser
+from bs4 import BeautifulSoup
+from bs4 import Comment
+
+# Type of printing.
+OK = 'ok'          # [*]
+NOTE = 'note'      # [+]
+FAIL = 'fail'      # [-]
+WARNING = 'warn'   # [!]
+NONE = 'none'      # No label.
+
+
+class CommentChecker:
+    def __init__(self, utility):
+        # Read config.ini.
+        self.utility = utility
+        config = configparser.ConfigParser()
+        self.file_name = os.path.basename(__file__)
+        self.full_path = os.path.dirname(os.path.abspath(__file__))
+        self.root_path = os.path.join(self.full_path, '../')
+        config.read(os.path.join(self.root_path, 'config.ini'))
+
+        try:
+            # Signature file holds the regexes that mark a comment as "unnecessary".
+            self.signature_dir = os.path.join(self.root_path, config['Common']['signature_path'])
+            self.signature_file = config['CommentChecker']['signature_file']
+            self.signature_path = os.path.join(self.signature_dir, self.signature_file)
+        except Exception as e:
+            # A broken config is unrecoverable for this module.
+            self.utility.print_message(FAIL, 'Reading config.ini is failure : {}'.format(e))
+            self.utility.write_log(40, 'Reading config.ini is failure : {}'.format(e))
+            sys.exit(1)
+
+    # Get html comments.
+ def get_html_comments(self, soup): + self.utility.write_log(20, '[In] Get html comments [{}].'.format(self.file_name)) + self.utility.write_log(20, '[Out] Get html comments [{}].'.format(self.file_name)) + return list(set(soup.find_all(string=lambda text: isinstance(text, Comment)))) + + # Get JavaScript comments. + def get_js_comments(self, soup): + self.utility.write_log(20, '[In] Get Javascript comments [{}].'.format(self.file_name)) + js_comment_list = [] + script_tags = soup.find_all('script') + for script_tag in script_tags: + target_text = script_tag.get_text() + js_comment_list.extend(re.findall(r'(/\*.*\*/)', target_text)) + js_comment_list.extend(re.findall(r'(//.*[\r\n])', target_text)) + self.utility.write_log(20, '[Out] Get Javascript comments [{}].'.format(self.file_name)) + return list(set(js_comment_list)) + + # Check unnecessary comments. + def get_bad_comment(self, response): + self.utility.print_message(NOTE, 'Check unnecessary comments.') + self.utility.write_log(20, '[In] Checkt unnecessary comments [{}].'.format(self.file_name)) + + # Check comment. + comment_list = [] + bad_comment_list = [] + soup = BeautifulSoup(response, 'html.parser') + + # Get comments. + comment_list.extend(self.get_html_comments(soup)) + comment_list.extend(self.get_js_comments(soup)) + try: + # Open signature file. + with codecs.open(self.signature_path, 'r', 'utf-8') as fin: + for comment in comment_list: + matching_patterns = fin.readlines() + for pattern in matching_patterns: + # Find bad comments. 
+ list_match = re.findall(pattern, comment, flags=re.IGNORECASE) + + if len(list_match) != 0: + bad_comment_list.extend(list_match) + msg = 'Find unnecessary comment : {}'.format(list_match) + self.utility.print_message(OK, msg) + self.utility.write_log(20, msg) + except Exception as e: + self.utility.print_exception(e, 'Getting comment is failure :{}.'.format(e)) + self.utility.write_log(30, 'Getting comment is failure :{}.'.format(e)) + self.utility.write_log(20, '[Out] Checkt unnecessary comments [{}].'.format(self.file_name)) + if len(bad_comment_list) == 0: + self.utility.print_message(OK, 'Unnecessary comment not found.') + return list(set(bad_comment_list)) diff --git a/modules/Gyoi_ContentExplorer.py b/modules/Gyoi_ContentExplorer.py new file mode 100644 index 0000000..54b0fa9 --- /dev/null +++ b/modules/Gyoi_ContentExplorer.py @@ -0,0 +1,138 @@ +#!/bin/env python +# -*- coding: utf-8 -*- +import os +import sys +import codecs +import re +import time +import urllib3 +import configparser +from urllib3.exceptions import InsecureRequestWarning +urllib3.disable_warnings(InsecureRequestWarning) + +# Type of printing. +OK = 'ok' # [*] +NOTE = 'note' # [+] +FAIL = 'fail' # [-] +WARNING = 'warn' # [!] +NONE = 'none' # No label. + + +class ContentExplorer: + def __init__(self, utility): + # Read config.ini. 
+ self.utility = utility + config = configparser.ConfigParser() + self.file_name = os.path.basename(__file__) + self.full_path = os.path.dirname(os.path.abspath(__file__)) + self.root_path = os.path.join(self.full_path, '../') + config.read(os.path.join(self.root_path, 'config.ini')) + + try: + self.signature_dir = os.path.join(self.root_path, config['Common']['signature_path']) + self.method_name = config['Common']['method_direct'] + self.signature_file = config['ContentExplorer']['signature_file'] + self.delay_time = float(config['ContentExplorer']['delay_time']) + except Exception as e: + self.utility.print_message(FAIL, 'Reading config.ini is failure : {}'.format(e)) + self.utility.write_log(40, 'Reading config.ini is failure : {}'.format(e)) + sys.exit(1) + + # Check product version. + def check_version(self, default_ver, version_pattern, response): + # Check version. + version = default_ver + if version_pattern != '*': + obj_match = re.search(version_pattern, response, flags=re.IGNORECASE) + if obj_match is not None and obj_match.re.groups > 1: + version = obj_match.group(2) + return version + + # Examine HTTP response. + def examine_response(self, check_pattern, default_ver, version_pattern, response): + self.utility.print_message(NOTE, 'Confirm string matching.') + self.utility.write_log(20, '[In] Confirm string matching [{}].'.format(self.file_name)) + + # Check exsisting contents. + result = [] + if check_pattern != '*' and re.search(check_pattern, response, flags=re.IGNORECASE) is not None: + result.append(True) + # Check product version. + result.append(self.check_version(default_ver, version_pattern, response)) + elif check_pattern == '*': + result.append(True) + # Check product version. + result.append(self.check_version(default_ver, version_pattern, response)) + else: + result.append(False) + result.append(default_ver) + return result + + # Explore unnecessary contents. 
+ def content_explorer(self, cve_explorer, protocol, fqdn, root_path, port, path, report): + self.utility.print_message(NOTE, 'Explore unnecessary contents.') + self.utility.write_log(20, '[In] Explore contents [{}].'.format(self.file_name)) + + # Open signature file. + target_base = protocol + '://' + fqdn + ':' + str(port) + path + signature_file = os.path.join(self.signature_dir, self.signature_file) + product_list = [] + with codecs.open(signature_file, 'r', encoding='utf-8') as fin: + signatures = fin.readlines() + for idx, signature in enumerate(signatures): + items = signature.replace('\n', '').replace('\r', '').split('@') + category = items[0] + vendor = items[1].lower() + product_name = items[2].lower() + default_ver = items[3] + path = items[4] + check_pattern = items[5] + version_pattern = items[6] + is_login = items[7] + target_url = '' + if path.startswith('/') is True: + target_url = target_base + path[1:] + else: + target_url = target_base + path[4] + + # Get HTTP response (header + body). + date = self.utility.get_current_date('%Y%m%d%H%M%S%f')[:-3] + print_date = self.utility.transform_date_string( + self.utility.transform_date_object(date[:-3], '%Y%m%d%H%M%S')) + res, server_header, res_header, res_body = self.utility.send_request('GET', target_url) + msg = '{}/{} Accessing : Status: {}, Url: {}'.format(idx + 1, len(signatures), res.status, target_url) + self.utility.print_message(OK, msg) + self.utility.write_log(20, msg) + + # Write log. + log_name = protocol + '_' + fqdn + '_' + str(port) + '_' + date + '.log' + log_path_fqdn = os.path.join(os.path.join(self.root_path, 'logs'), + fqdn + '_' + root_path.replace('/', '')) + if os.path.exists(log_path_fqdn) is False: + os.mkdir(log_path_fqdn) + log_file = os.path.join(log_path_fqdn, log_name) + with codecs.open(log_file, 'w', 'utf-8') as fout: + fout.write(target_url + '\n\n' + res_header + res_body) + + if res.status in [200, 301, 302]: + # Examine HTTP response. 
+ result = self.examine_response(check_pattern, default_ver, version_pattern, res_header + res_body) + if result[0] is True: + # Found unnecessary content or CMS admin page. + product = [category, vendor, product_name, result[1], path] + product = cve_explorer.cve_explorer([product]) + product_list.extend(product) + msg = 'Find product={}/{}, verson={}, trigger={}'.format(vendor, product_name, default_ver, path) + self.utility.print_message(OK, msg) + self.utility.write_log(20, msg) + + # Create report. + page_type = {} + if is_login == '1': + page_type = {'ml': {'prob': '-', 'reason': '-'}, 'url': {'prob': '100%', 'reason': path}} + report.create_report_body(target_url, fqdn, root_path, port, '*', self.method_name, product, + page_type, [], [], server_header, log_file, print_date) + + time.sleep(self.delay_time) + self.utility.write_log(20, '[Out] Explore contents [{}].'.format(self.file_name)) + return product_list diff --git a/modules/Gyoi_CveExplorerNVD.py b/modules/Gyoi_CveExplorerNVD.py new file mode 100644 index 0000000..62e8b24 --- /dev/null +++ b/modules/Gyoi_CveExplorerNVD.py @@ -0,0 +1,293 @@ +#!/bin/env python +# -*- coding: utf-8 -*- +import os +import sys +import re +import codecs +import json +import glob +import zipfile +import shutil +import urllib3 +import configparser +import pandas as pd +from urllib3.exceptions import InsecureRequestWarning +urllib3.disable_warnings(InsecureRequestWarning) + +# Type of printing. +OK = 'ok' # [*] +NOTE = 'note' # [+] +FAIL = 'fail' # [-] +WARNING = 'warn' # [!] +NONE = 'none' # No label. + + +class CveExplorerNVD: + def __init__(self, utility): + # Read config.ini. 
+ self.utility = utility + config = configparser.ConfigParser() + self.file_name = os.path.basename(__file__) + self.full_path = os.path.dirname(os.path.abspath(__file__)) + self.root_path = os.path.join(self.full_path, '../') + config.read(os.path.join(self.root_path, 'config.ini')) + + try: + self.con_timeout = float(config['CveExplorerNVD']['con_timeout']) + self.max_cve_count = int(config['CveExplorerNVD']['max_cve_count']) + self.vuln_db_dir = config['CveExplorerNVD']['vuln_db_dir'] + self.nvd_name = config['CveExplorerNVD']['nvd_name'] + self.nvd_db_header = str(config['CveExplorerNVD']['nvd_db_header']).split('@') + self.nvd_year_name = config['CveExplorerNVD']['nvd_year_name'] + self.nvd_db_dir = os.path.join(self.full_path, self.vuln_db_dir) + self.nvd_path = os.path.join(self.full_path, os.path.join(self.vuln_db_dir, self.nvd_name)) + self.nvd_year_path = os.path.join(self.full_path, os.path.join(self.vuln_db_dir, self.nvd_year_name)) + self.cve_year_list = config['CveExplorerNVD']['cve_years'].split('@') + self.nvd_meta_url = config['CveExplorerNVD']['nvd_meta_url'] + self.nvd_zip_url = config['CveExplorerNVD']['nvd_zip_url'] + self.nvd_chk_date_regex = config['CveExplorerNVD']['nvd_chk_date_regex'] + self.nvd_chk_hash_regex = config['CveExplorerNVD']['nvd_chk_hash_regex'] + self.nvd_date_format = config['CveExplorerNVD']['nvd_date_format'] + self.db_colmns = {} + except Exception as e: + self.utility.print_message(FAIL, 'Reading config.ini is failure : {}'.format(e)) + self.utility.write_log(40, 'Reading config.ini is failure : {}'.format(e)) + sys.exit(1) + + # Create/Get vulnerability data base. + for idx, col_name in enumerate(self.nvd_db_header): + self.db_colmns[idx] = col_name + self.df_vuln_db = self.initialize_vuln_db() + + # Extract vulnerability information from NVD. 
+ def extract_vuln_info(self, cve_items, cve_year, last_modified_date): + self.utility.write_log(20, '[In] Extract vulnerability information [{}]'.format(self.file_name)) + all_cve_list = [] + + # Get last modified date. + last_modified_date_value = last_modified_date + + for cve_item in cve_items['CVE_Items']: + # Get problem type (ex. CWE-**). + per_cve = cve_item['cve'] + problem_type_value = '' + problems = per_cve['problemtype']['problemtype_data'] + for description in problems: + for problem in description['description']: + problem_type_value = problem['value'] + + # Get description of vulnerability. + description_value = '' + for description in per_cve['description']['description_data']: + description_value = description['value'] + + # Get CVSS score. + cvss_score_v2_value = '' + cvss_score_v3_value = '' + impact = cve_item['impact'] + + # CVSS v3 score. + if 'baseMetricV3' in impact: + cvss_score_v3_value = float(impact['baseMetricV3']['cvssV3']['baseScore']) + else: + cvss_score_v3_value = 0 + + # CVSS v2 score. + if 'baseMetricV2' in impact: + cvss_score_v2_value = format(impact['baseMetricV2']['cvssV2']['baseScore']) + else: + cvss_score_v2_value = 0 + + # Get data type and CVE id. + data_type_value = per_cve['data_type'] + cve_id_value = per_cve['CVE_data_meta']['ID'] + + # Get configuration of CPE 2.3. + some_cpe = [] + for nodes in cve_item['configurations']['nodes']: + if 'children' in nodes: + for child_node in nodes['children']: + if 'cpe_match' in child_node: + for cpe in child_node['cpe_match']: + some_cpe.append(cpe) + else: + if 'cpe_match' in nodes: + for cpe in nodes['cpe_match']: + some_cpe.append(cpe) + for per_cpe in some_cpe: + cpe23_list = per_cpe['cpe23Uri'].split(':') + category_value = cpe23_list[2] + vendor_name_value = cpe23_list[3] + product_name_value = cpe23_list[4] + version_value = cpe23_list[5] + update_value = cpe23_list[6] + edition_value = cpe23_list[7] + + # Add each item to list. 
+ self.utility.print_message(OK, 'Extract CVE information : ' + '{}, Vendor={}, ' + 'Product={}, Version={}'.format(cve_id_value, + vendor_name_value, + product_name_value, + version_value)) + per_cve_list = [] + per_cve_list.append(last_modified_date_value) + per_cve_list.append(data_type_value) + per_cve_list.append(problem_type_value) + per_cve_list.append(cve_id_value) + per_cve_list.append(cvss_score_v2_value) + per_cve_list.append(cvss_score_v3_value) + per_cve_list.append(str(category_value).lower()) + per_cve_list.append(str(vendor_name_value).lower()) + per_cve_list.append(str(product_name_value).lower()) + per_cve_list.append(str(version_value).lower()) + per_cve_list.append(str(update_value).lower()) + per_cve_list.append(str(edition_value).lower()) + per_cve_list.append(description_value.replace('\r', ' ').replace('\n', ' ')) + all_cve_list.append(per_cve_list) + + # Create csv file. + db_path = self.nvd_year_path.replace('*', cve_year) + self.utility.write_log(20, 'Create yearly vulnerability database : {}.'.format(db_path)) + pd.DataFrame(all_cve_list).to_csv(db_path, header=False, index=False) + self.utility.write_log(20, '[Out] Extract vulnerability information [{}]'.format(self.file_name)) + + # Create vulnerability yearly data base: + def create_vuln_yearly_db(self, cve_year, last_modified_date): + # Get cve list from NVD. + self.utility.write_log(20, '[In] Create yearly vulnerability database [{}]'.format(self.file_name)) + http = urllib3.PoolManager(timeout=self.con_timeout) + target_url = self.nvd_zip_url.replace('*', cve_year) + tmp_file = os.path.join(self.nvd_db_dir, 'temp_' + cve_year + '.zip') + + # Download zip file (include cve list) and uncompress zip file. 
+ target_json_name = '' + self.utility.write_log(20, 'Accessing : {}'.format(target_url)) + self.utility.print_message(OK, 'Get {} CVE list from {}'.format(cve_year, target_url)) + with http.request('GET', target_url, preload_content=False) as res, open(tmp_file, 'wb') as fout: + shutil.copyfileobj(res, fout) + + with zipfile.ZipFile(tmp_file, 'r') as downloaded_zip: + target_json_name = downloaded_zip.namelist()[0] + downloaded_zip.extractall(self.nvd_db_dir) + os.remove(tmp_file) + + # Create cve list of cve file. + yearly_cve_list = [] + with codecs.open(os.path.join(self.nvd_db_dir, target_json_name), 'r', encoding='utf-8') as fin: + self.extract_vuln_info(json.loads(fin.read().replace('\0', '')), cve_year, last_modified_date) + + self.utility.write_log(20, '[Out] Create yearly vulnerability database [{}]'.format(self.file_name)) + return yearly_cve_list + + # Initialize Vulnerability Data Base. + def initialize_vuln_db(self): + # Get vulnerabilities information. + self.utility.write_log(20, '[In] Initialize vulnerability database [{}].'.format(self.file_name)) + http = urllib3.PoolManager(timeout=self.con_timeout) + update_flag = False + for cve_year in self.cve_year_list: + # Get last modified date and file hash. + try: + # Get meta information. + target_url = self.nvd_meta_url.replace('*', cve_year) + self.utility.print_message(OK, 'Get {} meta information from {}'.format(cve_year, target_url)) + self.utility.write_log(20, 'Accessing : {}'.format(target_url)) + res_meta = http.request('GET', target_url) + obj_match = re.match(self.nvd_chk_date_regex, res_meta.data.decode('utf-8')) + last_modified_date = obj_match.group(obj_match.lastindex) + + year_db = self.nvd_year_path.replace('*', cve_year) + if os.path.exists(year_db) is True: + # Get existing data base. + df_year_db = pd.read_csv(year_db, + sep=',', + names=self.nvd_db_header, + header=None, + encoding='utf-8').fillna('') + + # Check last modified date. 
+ db_cve_date = self.utility.transform_date_object(df_year_db['last_modified_date'][0], + self.nvd_date_format) + currently_cve_date = self.utility.transform_date_object(last_modified_date, self.nvd_date_format) + if db_cve_date < currently_cve_date: + # Create vulnerability data base. + self.utility.print_message(OK, 'Update {} : latest date={}, last modified date={}'. + format(year_db, + currently_cve_date.strftime(self.nvd_date_format), + db_cve_date.strftime(self.nvd_date_format))) + self.create_vuln_yearly_db(cve_year, last_modified_date) + update_flag = True + else: + self.utility.print_message(FAIL, 'Skip updating {} : no update from {}'. + format(year_db, db_cve_date.strftime(self.nvd_date_format))) + else: + # Create vulnerability data base. + self.create_vuln_yearly_db(cve_year, last_modified_date) + update_flag = True + except Exception as e: + self.utility.print_exception(e, 'Getting last modified date is failure.') + self.utility.write_log(30, 'Getting last modified date is failure.') + + df_vuln_db = None + if update_flag is True: + try: + # Load updating vulnerability data base each year. + self.utility.print_message(OK, 'Create vulnerability database : {}'.format(self.nvd_path)) + year_csv_list = glob.glob(os.path.join(self.nvd_db_dir, self.nvd_year_name)) + + # Create DataFrame. + cve_list = [] + for file in year_csv_list: + cve_list.append(pd.read_csv(file, sep=',', header=None, encoding='utf-8').fillna('')) + if len(cve_list) != 0: + # Create new vulnerability data base. + df_vuln_db = pd.concat(cve_list).rename(columns=self.db_colmns).sort_values(by=['cvss_v3_score', + 'cvss_v2_score'], + ascending=False) + df_vuln_db.to_csv(self.nvd_path, mode='w', index=False) + except Exception as e: + self.utility.print_exception(e, 'Creating vulnerability database is failure : {}.'.format(e)) + self.utility.write_log(30, 'Creating vulnerability database is failure : {}.'.format(e)) + else: + # Load existing vulnerability data base. 
+ self.utility.print_message(OK, 'Load vulnerability database : {}'.format(self.nvd_path)) + df_vuln_db = pd.read_csv(self.nvd_path, sep=',', encoding='utf-8') + + self.utility.write_log(20, '[Out] Initialize vulnerability database [{}].'.format(self.file_name)) + return df_vuln_db + + # Explore CVE information. + def cve_explorer(self, product_list): + self.utility.write_log(20, '[In] Explore CVE information [{}].'.format(self.file_name)) + for prod_idx, product in enumerate(product_list): + self.utility.print_message(NOTE, 'Explore CVE of {}/{} from NVD.'.format(product[1], product[2])) + + df_selected_cve = None + cve_info = '' + if product[1] != '*' and product[3] != '*': + df_selected_cve = self.df_vuln_db[(self.df_vuln_db['vendor_name'] == product[1]) & + (self.df_vuln_db['product_name'] == product[2]) & + (self.df_vuln_db['version_value'] == product[3])] + elif product[1] != '*' and product[3] == '*': + df_selected_cve = self.df_vuln_db[(self.df_vuln_db['vendor_name'] == product[1]) & + (self.df_vuln_db['product_name'] == product[2])] + elif product[1] == '*' and product[3] != '*': + df_selected_cve = self.df_vuln_db[(self.df_vuln_db['product_name'] == product[2]) & + (self.df_vuln_db['version_value'] == product[3])] + else: + df_selected_cve = self.df_vuln_db[(self.df_vuln_db['product_name'] == product[2])] + for cve_idx, cve_id in enumerate(df_selected_cve['id'].drop_duplicates()): + msg = 'Find {} for {}/{} {}.'.format(cve_id, product[1], product[2], product[3]) + self.utility.print_message(WARNING, msg) + self.utility.write_log(30, msg) + cve_info += cve_id + '\n' + if cve_idx == (self.max_cve_count - 1): + break + # Insert CVE to product list. + if cve_info == '': + cve_info = 'Cannot search.' 
+ product_list[prod_idx].insert(len(product), cve_info) + + self.utility.write_log(20, '[Out] Explore CVE information [{}].'.format(self.file_name)) + return product_list diff --git a/modules/Gyoi_ErrorChecker.py b/modules/Gyoi_ErrorChecker.py new file mode 100644 index 0000000..ebf9c54 --- /dev/null +++ b/modules/Gyoi_ErrorChecker.py @@ -0,0 +1,62 @@ +#!/bin/env python +# -*- coding: utf-8 -*- +import os +import sys +import codecs +import re +import configparser + +# Type of printing. +OK = 'ok' # [*] +NOTE = 'note' # [+] +FAIL = 'fail' # [-] +WARNING = 'warn' # [!] +NONE = 'none' # No label. + + +class ErrorChecker: + def __init__(self, utility): + # Read config.ini. + self.utility = utility + config = configparser.ConfigParser() + self.file_name = os.path.basename(__file__) + self.full_path = os.path.dirname(os.path.abspath(__file__)) + self.root_path = os.path.join(self.full_path, '../') + config.read(os.path.join(self.root_path, 'config.ini')) + + try: + self.signature_dir = os.path.join(self.root_path, config['Common']['signature_path']) + self.signature_file = config['ErrorChecker']['signature_file'] + self.signature_path = os.path.join(self.signature_dir, self.signature_file) + except Exception as e: + self.utility.print_message(FAIL, 'Reading config.ini is failure : {}'.format(e)) + self.utility.write_log(40, 'Reading config.ini is failure : {}'.format(e)) + sys.exit(1) + + # Check unnecessary error message. + def get_error_message(self, response): + self.utility.print_message(NOTE, 'Check unnecessary error message.') + self.utility.write_log(20, '[In] Check unnecessary error message [{}].'.format(self.file_name)) + + # Check comment. + error_list = [] + try: + # Open signature file. 
+ with codecs.open(self.signature_path, 'r', 'utf-8') as fin: + matching_patterns = fin.readlines() + for pattern in matching_patterns: + list_match = re.findall(pattern, response, flags=re.IGNORECASE) + + if len(list_match) != 0: + error_list.extend(list_match) + msg = 'Find unnecessary error message : {}'.format(list_match) + self.utility.print_message(OK, msg) + self.utility.write_log(20, msg) + except Exception as e: + msg = 'Getting error message is failure : {}.'.format(e) + self.utility.print_exception(e, msg) + self.utility.write_log(30, msg) + self.utility.write_log(20, '[Out] Check unnecessary error message [{}].'.format(self.file_name)) + if len(error_list) == 0: + self.utility.print_message(OK, 'Unnecessary error message not found.') + return list(set(error_list)) diff --git a/modules/Gyoi_Exploit.py b/modules/Gyoi_Exploit.py new file mode 100644 index 0000000..b56bee7 --- /dev/null +++ b/modules/Gyoi_Exploit.py @@ -0,0 +1,533 @@ +#!/bin/env python +# -*- coding: utf-8 -*- +import sys +import os +import csv +import json +import time +import re +import codecs +import configparser +import msgpack +import http.client +import pandas as pd +sys.path.append('../') +from util import Utilty + +# Type of printing. +OK = 'ok' # [*] +NOTE = 'note' # [+] +FAIL = 'fail' # [-] +WARNING = 'warn' # [!] +NONE = 'none' # No label. + + +# Interface of Metasploit. +class Msgrpc: + def __init__(self, option=[]): + self.utility = Utilty() + self.host = option.get('host') or "127.0.0.1" + self.port = option.get('port') or 55552 + self.uri = option.get('uri') or "/api/" + self.ssl = option.get('ssl') or False + self.authenticated = False + self.token = False + self.headers = {"Content-type": "binary/message-pack"} + if self.ssl: + self.client = http.client.HTTPSConnection(self.host, self.port) + else: + self.client = http.client.HTTPConnection(self.host, self.port) + + # Call RPC API. 
+    def call(self, meth, option):
+        """Send one msgpack-encoded RPC request and return the unpacked response dict.
+
+        Response keys/values are byte strings (msgpack default) — callers decode.
+        """
+        # Any method except auth.login requires a prior successful login;
+        # without a token we bail out of the whole process.
+        if meth != "auth.login":
+            if not self.authenticated:
+                self.utility.print_message(FAIL, 'MsfRPC: Not Authenticated')
+                exit(1)
+
+        if meth != "auth.login":
+            option.insert(0, self.token)
+
+        # Wire layout: [method, (token,) *args], msgpack-encoded, POSTed to self.uri.
+        option.insert(0, meth)
+        params = msgpack.packb(option)
+        self.client.request("POST", self.uri, params, self.headers)
+        resp = self.client.getresponse()
+        return msgpack.unpackb(resp.read())
+
+    # Log in to RPC Server.
+    def login(self, user, password):
+        # On success the token is cached and silently added to later calls.
+        ret = self.call('auth.login', [user, password])
+        if ret.get(b'result') == b'success':
+            self.authenticated = True
+            self.token = ret.get(b'token')
+            return True
+        else:
+            self.utility.print_message(FAIL, 'MsfRPC: Not Authenticated')
+            exit(1)
+
+    # Send Metasploit command.
+    def send_command(self, console_id, command, visualization, sleep=0.1):
+        # console.write queues the command; give msfconsole `sleep` seconds
+        # to produce output, then read it back in one shot.
+        _ = self.call('console.write', [console_id, command])
+        time.sleep(sleep)
+        ret = self.call('console.read', [console_id])
+        if visualization:
+            try:
+                self.utility.print_message(NONE, '{}'.format(ret.get(b'data').decode('utf-8')))
+            except Exception as e:
+                self.utility.print_exception(e, 'Send_command is exception.')
+        return ret
+
+    # Get all modules.
+    def get_module_list(self, module_type):
+        """Return the module names of the given type as a list of str."""
+        ret = {}
+        if module_type == 'exploit':
+            ret = self.call('module.exploits', [])
+        elif module_type == 'auxiliary':
+            ret = self.call('module.auxiliary', [])
+        elif module_type == 'post':
+            ret = self.call('module.post', [])
+        elif module_type == 'payload':
+            ret = self.call('module.payloads', [])
+        elif module_type == 'encoder':
+            ret = self.call('module.encoders', [])
+        elif module_type == 'nop':
+            ret = self.call('module.nops', [])
+        # RPC hands back byte strings; decode for callers.
+        # NOTE(review): an unknown module_type leaves ret == {} and the
+        # ret[b'modules'] lookup below raises KeyError.
+        byte_list = ret[b'modules']
+        string_list = []
+        for module in byte_list:
+            string_list.append(module.decode('utf-8'))
+        return string_list
+
+    # Get module detail information.
+    def get_module_info(self, module_type, module_name):
+        """Return the raw module.info RPC response (keys are byte strings)."""
+        return self.call('module.info', [module_type, module_name])
+
+    # Get payload that compatible module.
+    def get_compatible_payload_list(self, module_name):
+        """Return payload names compatible with the exploit module, as str."""
+        ret = self.call('module.compatible_payloads', [module_name])
+        byte_list = ret[b'payloads']
+        string_list = []
+        for module in byte_list:
+            string_list.append(module.decode('utf-8'))
+        return string_list
+
+    # Get payload that compatible target.
+    def get_target_compatible_payload_list(self, module_name, target_num):
+        """Return payload names compatible with one target index of the module."""
+        ret = self.call('module.target_compatible_payloads', [module_name, target_num])
+        byte_list = ret[b'payloads']
+        string_list = []
+        for module in byte_list:
+            string_list.append(module.decode('utf-8'))
+        return string_list
+
+    # Get module options.
+    def get_module_options(self, module_type, module_name):
+        """Return the raw module.options RPC response (byte-string keyed)."""
+        return self.call('module.options', [module_type, module_name])
+
+    # Execute module.
+    def execute_module(self, module_type, module_name, options):
+        """Launch a module; return (job_id, uuid) — uuid decoded, job_id as returned."""
+        ret = self.call('module.execute', [module_type, module_name, options])
+        job_id = ret[b'job_id']
+        uuid = ret[b'uuid'].decode('utf-8')
+        return job_id, uuid
+
+    # Get job list.
+    def get_job_list(self):
+        """Return running job ids as a list of int."""
+        jobs = self.call('job.list', [])
+        byte_list = jobs.keys()
+        job_list = []
+        for job_id in byte_list:
+            job_list.append(int(job_id.decode('utf-8')))
+        return job_list
+
+    # Get job detail information.
+    def get_job_info(self, job_id):
+        return self.call('job.info', [job_id])
+
+    # Stop job.
+    def stop_job(self, job_id):
+        return self.call('job.stop', [job_id])
+
+    # Get session list.
+    def get_session_list(self):
+        return self.call('session.list', [])
+
+    # Stop shell session.
+    def stop_session(self, session_id):
+        # session.stop expects the id as a string.
+        _ = self.call('session.stop', [str(session_id)])
+
+    # Stop meterpreter session.
+    def stop_meterpreter_session_kill(self, session_id):
+        _ = self.call('session.meterpreter_session_kill', [str(session_id)])
+
+    # Log out from RPC Server.
+    def logout(self):
+        """Invalidate the cached token; exits the process on failure."""
+        ret = self.call('auth.logout', [self.token])
+        if ret.get(b'result') == b'success':
+            self.authenticated = False
+            self.token = ''
+            return True
+        else:
+            self.utility.print_message(FAIL, 'MsfRPC: Not Authenticated')
+            exit(1)
+
+    # Disconnection.
+    def termination(self, console_id):
+        """Tear down: kill the console, then log out of the RPC server."""
+        # Kill a console.
+        _ = self.call('console.session_kill', [console_id])
+        # Log out.
+        _ = self.logout()
+
+
+# Metasploit's environment.
+class Exploit:
+    def __init__(self, utility):
+        # Read config file.
+        self.utility = utility
+        self.file_name = os.path.basename(__file__)
+        self.full_path = os.path.dirname(os.path.abspath(__file__))
+        self.root_path = os.path.join(self.full_path, '../')
+        config = configparser.ConfigParser()
+        try:
+            config.read(os.path.join(self.root_path, 'config.ini'))
+        except Exception as e:
+            self.utility.print_message(FAIL, 'Reading config.ini is failure : {}'.format(e))
+            self.utility.write_log(40, 'Reading config.ini is failure : {}'.format(e))
+            sys.exit(1)
+
+        # RPC endpoint credentials come from the [Exploit] section.
+        server_host = config['Exploit']['server_host']
+        server_port = config['Exploit']['server_port']
+        msgrpc_user = config['Exploit']['msgrpc_user']
+        msgrpc_password = config['Exploit']['msgrpc_pass']
+        self.data_path = os.path.join(self.full_path, config['Exploit']['data_path'])
+        # (sic) attribute name "converion_table_path" is kept as-is; renaming would break callers.
+        self.converion_table_path = os.path.join(self.data_path, config['Exploit']['conversion_table'])
+        self.timeout = int(config['Exploit']['timeout'])
+        self.report_path = os.path.join(self.full_path, config['Report']['report_path'])
+        self.report_temp = os.path.join(self.report_path, config['Report']['report_temp'])
+
+        # Create Metasploit's instance.
+        # Logs in immediately and opens a console — construction performs network I/O.
+        self.client = Msgrpc({'host': server_host, 'port': server_port})
+        self.client.login(msgrpc_user, msgrpc_password)
+        self.console_id = self.get_console()
+
+    # Parse.
+    def cutting_strings(self, pattern, target):
+        """Return all regex matches of pattern in target (thin re.findall wrapper)."""
+        return re.findall(pattern, target)
+
+    # Convert product name nvd style to metasploit style.
    def convert_product_name(self, product_list):
        """Map NVD-style (vendor, product) pairs to Metasploit product names.

        :param product_list: iterable of [vendor, name] pairs; '*' is a wildcard.
        :return: list of Metasploit-style names (duplicates possible).
        """
        # Get Conversion table.
        df_origin = pd.read_csv(self.converion_table_path, encoding='utf-8').fillna('')
        target_list = []
        df_selected_product = None
        for product in product_list:
            if product[0] != '*' and product[1] != '*':
                # Vendor and product both known: match on both columns.
                df_selected_product = df_origin[(df_origin['nvd_vendor'] == product[0]) &
                                                (df_origin['nvd_name'] == product[1])]
            elif product[0] == '*' and product[1] != '*':
                # Vendor unknown: match on product name only.
                df_selected_product = df_origin[(df_origin['nvd_name'] == product[1])]
            else:
                # Both fields wildcarded: nothing to look up.
                self.utility.print_message(WARNING, 'Not exist product name: {}'.format(product))
                continue

            # Extract product name for metasploit.
            if df_selected_product is not None:
                for target in df_selected_product['metasploit']:
                    target_list.append(target)
        return target_list

    # Create MSFconsole.
    def get_console(self):
        """Create a msfconsole via msgrpc and return its id."""
        # Create a console.
        ret = self.client.call('console.create', [])
        console_id = ret.get(b'id')
        # Initial read drains the console banner so later reads start clean.
        ret = self.client.call('console.read', [console_id])
        return console_id

    # Get all Exploit module list.
    def get_all_exploit_list(self):
        """Return every exploit module ranked excellent/great/good.

        Results are cached in data_path/exploit_list.csv; the cache is used
        on subsequent runs instead of querying Metasploit again.
        """
        self.utility.print_message(NOTE, 'Get exploit list.')
        self.utility.write_log(20, '[In] Get exploit list [{}].'.format(self.file_name))
        all_exploit_list = []
        if os.path.exists(os.path.join(self.data_path, 'exploit_list.csv')) is False:
            self.utility.print_message(OK, 'Loading exploit list from Metasploit.')

            # Get Exploit module list.
            all_exploit_list = []
            exploit_candidate_list = self.client.get_module_list('exploit')
            for exploit in exploit_candidate_list:
                module_info = self.client.get_module_info('exploit', exploit)
                # Keep only reliable modules (by Metasploit rank).
                if module_info[b'rank'].decode('utf-8') in {'excellent', 'great', 'good'}:
                    all_exploit_list.append(exploit)

            # Save Exploit module list to local file.
            self.utility.print_message(OK, 'Loaded exploit num: {}'.format(str(len(all_exploit_list))))
            with codecs.open(os.path.join(self.data_path, 'exploit_list.csv'), 'w', 'utf-8') as fout:
                for item in all_exploit_list:
                    fout.write(item + '\n')
            self.utility.print_message(OK, 'Saved exploit list.')
        else:
            # Get exploit module list from local file.
            local_file = os.path.join(self.data_path, 'exploit_list.csv')
            self.utility.print_message(OK, 'Loading exploit list from local file: {}'.format(local_file))
            with codecs.open(local_file, 'r', 'utf-8') as fin:
                for item in fin:
                    all_exploit_list.append(item.rstrip('\n'))
        self.utility.write_log(20, '[Out] Get exploit list [{}].'.format(self.file_name))
        return all_exploit_list

    # Create exploit tree.
    def get_exploit_tree(self, all_exploit_list):
        """Build {exploit: {target_list, targets->payloads, options}} for all modules.

        Very slow on first run (one console round-trip per module); cached in
        data_path/exploit_tree.json afterwards.
        """
        self.utility.write_log(20, '[In] Get exploit tree [{}].'.format(self.file_name))
        self.utility.print_message(NOTE, 'Get exploit tree.')
        exploit_tree = {}
        if os.path.exists(os.path.join(self.data_path, 'exploit_tree.json')) is False:
            for idx, exploit in enumerate(all_exploit_list):
                exploit = exploit.replace('\n', '').replace('\r', '')
                temp_target_tree = {'targets': []}
                temp_tree = {}
                # Set exploit module.
                use_cmd = 'use exploit/' + exploit + '\n'
                _ = self.client.send_command(self.console_id, use_cmd, False)

                # Get target: poll the console until the targets table shows up,
                # giving up after ~5 seconds.
                show_cmd = 'show targets\n'
                target_info = ''
                time_count = 0
                while True:
                    ret = self.client.send_command(self.console_id, show_cmd, False)
                    target_info = ret.get(b'data').decode('utf-8')
                    if 'Exploit targets' in target_info:
                        break
                    if time_count == 5:
                        self.utility.print_message(WARNING, 'Timeout: {}'.format(show_cmd))
                        self.utility.print_message(WARNING, 'No exist Targets.')
                        break
                    time.sleep(1.0)
                    time_count += 1
                # Pull the numeric target ids out of the console table.
                # NOTE(review): the char class [a-z|A-Z|0-9] also matches '|';
                # kept as-is because it parses current msfconsole output.
                target_list = self.cutting_strings(r'\s*([0-9]{1,3}) .*[a-z|A-Z|0-9].*[\r\n]', target_info)
                for target in target_list:
                    # Get payload list.
                    payload_list = self.client.get_target_compatible_payload_list(exploit, int(target))
                    temp_tree[target] = payload_list

                # Get options.
                options = self.client.get_module_options('exploit', exploit)
                key_list = options.keys()
                option = {}
                for key in key_list:
                    sub_option = {}
                    sub_key_list = options[key].keys()
                    for sub_key in sub_key_list:
                        # msgrpc returns bytes keys/values; normalise to str
                        # so the tree can be serialised as JSON.
                        if isinstance(options[key][sub_key], list):
                            end_option = []
                            for end_key in options[key][sub_key]:
                                end_option.append(end_key.decode('utf-8'))
                            sub_option[sub_key.decode('utf-8')] = end_option
                        else:
                            end_option = {}
                            if isinstance(options[key][sub_key], bytes):
                                sub_option[sub_key.decode('utf-8')] = options[key][sub_key].decode('utf-8')
                            else:
                                sub_option[sub_key.decode('utf-8')] = options[key][sub_key]

                    # User specify: placeholder a human can fill in later to
                    # override the module default (see set_options()).
                    sub_option['user_specify'] = ''
                    option[key.decode('utf-8')] = sub_option

                # Add payloads and targets to exploit tree.
                temp_target_tree['target_list'] = target_list
                temp_target_tree['targets'] = temp_tree
                temp_target_tree['options'] = option
                exploit_tree[exploit] = temp_target_tree
                # Output processing status to console.
                msg = '{}/{} exploit:{}, targets:{}'.format(str(idx + 1),
                                                            len(all_exploit_list),
                                                            exploit,
                                                            len(target_list))
                self.utility.print_message(OK, msg)

            # Save exploit tree to local file.
            with codecs.open(os.path.join(self.data_path, 'exploit_tree.json'), 'w', 'utf-8') as fout:
                json.dump(exploit_tree, fout, indent=4)
            self.utility.print_message(OK, 'Saved exploit tree.')
        else:
            # Get exploit tree from local file.
            exploit_tree = {}
            local_file = os.path.join(self.data_path, 'exploit_tree.json')
            self.utility.print_message(OK, 'Loading exploit tree from local file: {}'.format(local_file))
            with codecs.open(local_file, 'r', 'utf-8') as fin:
                exploit_tree = json.load(fin)
        self.utility.write_log(20, '[Out] Get exploit tree [{}].'.format(self.file_name))
        return exploit_tree

    # Get exploit module list for product.
+ def get_exploit_list(self, prod_name): + self.utility.write_log(20, '[In] Get exploit list [{}].'.format(self.file_name)) + module_list = [] + search_cmd = 'search name:' + prod_name + ' type:exploit app:server\n' + ret = self.client.send_command(self.console_id, search_cmd, False, 3.0) + raw_module_info = ret.get(b'data').decode('utf-8') + exploit_candidate_list = self.cutting_strings(r'(exploit/.*)', raw_module_info) + for exploit in exploit_candidate_list: + raw_exploit_info = exploit.split(' ') + exploit_info = list(filter(lambda s: s != '', raw_exploit_info)) + if exploit_info[2] in {'excellent', 'great', 'good'}: + module_list.append(exploit_info[0]) + self.utility.write_log(20, '[Out] Get exploit list [{}].'.format(self.file_name)) + return module_list + + # Get target list. + def get_target_list(self): + self.utility.write_log(20, '[In] Get target list [{}].'.format(self.file_name)) + ret = self.client.send_command(self.console_id, 'show targets\n', False, 3.0) + target_info = ret.get(b'data').decode('utf-8') + target_list = self.cutting_strings(r'\s+([0-9]{1,3}).*[a-z|A-Z|0-9].*[\r\n]', target_info) + self.utility.write_log(20, '[Out] Get target list [{}].'.format(self.file_name)) + return target_list + + # Set Metasploit options. + def set_options(self, target_ip, target_port, exploit, payload, exploit_tree): + self.utility.write_log(20, '[In] Set option [{}].'.format(self.file_name)) + options = exploit_tree[exploit]['options'] + key_list = options.keys() + option = {} + for key in key_list: + if options[key]['required'] is True: + sub_key_list = options[key].keys() + if 'default' in sub_key_list: + # If "user_specify" is not null, set "user_specify" value to the key. 
+ if options[key]['user_specify'] == '': + option[key] = options[key]['default'] + else: + option[key] = options[key]['user_specify'] + else: + option[key] = '0' + option['RHOST'] = target_ip + option['RPORT'] = target_port + if payload != '': + option['PAYLOAD'] = payload + self.utility.write_log(20, '[Out] Set option [{}].'.format(self.file_name)) + return option + + # Run exploit. + def exploit(self, target=[]): + self.utility.write_log(20, '[In] Execute exploit [{}].'.format(self.file_name)) + + # Get target info. + target_ip = target.get('ip') + target_port = target.get('port') + product_list = target.get('prod_list') + + # Convert product name nvd style to metasploit style. + target_list = list(set(self.convert_product_name(product_list))) + + # Get all exploit list. + all_exploit_list = self.get_all_exploit_list() + exploit_tree = self.get_exploit_tree(all_exploit_list) + + # Get exploit modules link with product. + for prod_name in target_list: + module_list = self.get_exploit_list(prod_name) + for exploit_module in module_list: + # Set exploit module. + _ = self.client.send_command(self.console_id, 'use ' + exploit_module + '\n', False, 1.0) + + # Get target list. + target_list = self.get_target_list() + + # Send payload to target server while changing target. + for target in target_list: + result = '' + # Get payload list link with target. + payload_list = self.client.get_target_compatible_payload_list(exploit_module, int(target)) + for payload in payload_list: + # Set options. + option = self.set_options(target_ip, target_port, exploit_module[8:], payload, exploit_tree) + + # Run exploit. + job_id, uuid = self.client.execute_module('exploit', exploit_module, option) + + # Judgement. + if uuid is not None: + # Waiting for running is finish (maximum wait time is "self.timeout (sec)". + time_count = 0 + while True: + # Get job list. 
+ job_id_list = self.client.get_job_list() + if job_id in job_id_list: + time.sleep(1) + else: + break + if self.timeout == time_count: + # Delete job. + result = 'timeout' + self.client.stop_job(str(job_id)) + break + time_count += 1 + # Get session list. + sessions = self.client.get_session_list() + key_list = sessions.keys() + if len(key_list) != 0: + for key in key_list: + # If session list include target exploit uuid, + # it probably succeeded exploitation. + exploit_uuid = sessions[key][b'exploit_uuid'].decode('utf-8') + if uuid == exploit_uuid: + result = 'bingo!!' + + # Gather reporting items. + session_type = sessions[key][b'type'].decode('utf-8') + session_port = str(sessions[key][b'session_port']) + session_exploit = sessions[key][b'via_exploit'].decode('utf-8') + session_payload = sessions[key][b'via_payload'].decode('utf-8') + module_info = self.client.get_module_info('exploit', session_exploit) + vuln_name = module_info[b'name'].decode('utf-8') + description = module_info[b'description'].decode('utf-8') + ref_list = module_info[b'references'] + reference = '' + for item in ref_list: + reference += '[' + item[0].decode('utf-8') + ']' + '@' + item[1].decode( + 'utf-8') + '@@' + + # Logging target information for reporting. + with open(os.path.join(self.report_path, self.report_temp), 'a') as fout: + bingo = [target_ip, + session_port, + prod_name, + vuln_name, + session_type, + description, + session_exploit, + target, + session_payload, + reference] + writer = csv.writer(fout) + writer.writerow(bingo) + + # Disconnect all session for next exploit. + self.client.stop_session(key) + self.client.stop_meterpreter_session_kill(key) + break + else: + # If session list doesn't target exploit uuid, + # it failed exploitation. + result = 'failure' + else: + # If session list is empty, it failed exploitation. + result = 'failure' + else: + # Time out. + result = 'timeout' + + # Output result to console. 
                        # string_color is never used; kept for fidelity.
                        string_color = ''
                        msg = '{}, target: {}, payload: {}, result: {}'.format(exploit_module, target, payload, result)
                        if result == 'bingo!!':
                            self.utility.print_message(OK, msg)
                        else:
                            self.utility.print_message(WARNING, msg)

        # Terminate
        self.client.termination(self.console_id)
        self.utility.write_log(20, '[Out] Execute exploit [{}].'.format(self.file_name))
diff --git a/modules/Gyoi_GoogleHack.py b/modules/Gyoi_GoogleHack.py new file mode 100644 index 0000000..4e5a878 --- /dev/null +++ b/modules/Gyoi_GoogleHack.py @@ -0,0 +1,220 @@
#!/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import time
import codecs
import re
import urllib3
import configparser
from googleapiclient.discovery import build
from urllib3.exceptions import InsecureRequestWarning
urllib3.disable_warnings(InsecureRequestWarning)

# Type of printing.
OK = 'ok'          # [*]
NOTE = 'note'      # [+]
FAIL = 'fail'      # [-]
WARNING = 'warn'   # [!]
NONE = 'none'      # No label.


# Google Custom Search driver used for "Google hacking" signature checks.
class GoogleCustomSearch:
    def __init__(self, utility):
        # Read config.ini.
        # utility: project-wide helper providing logging/printing/HTTP.
        self.utility = utility
        config = configparser.ConfigParser()
        self.file_name = os.path.basename(__file__)
        self.full_path = os.path.dirname(os.path.abspath(__file__))
        self.root_path = os.path.join(self.full_path, '../')
        config.read(os.path.join(self.root_path, 'config.ini'))

        try:
            self.signature_dir = os.path.join(self.root_path, config['Common']['signature_path'])
            self.method_name = config['Common']['method_search']
            self.api_key = config['GoogleHack']['api_key']
            self.search_engine_id = config['GoogleHack']['search_engine_id']
            self.signature_file = config['GoogleHack']['signature_file']
            self.api_strict_key = config['GoogleHack']['api_strict_key']
            self.api_strict_value = config['GoogleHack']['api_strict_value']
            self.start_index = int(config['GoogleHack']['start_index'])
            self.delay_time = float(config['GoogleHack']['delay_time'])
            self.delay_time_direct_access = float(config['ContentExplorer']['delay_time'])
        except Exception as e:
            self.utility.print_message(FAIL, 'Reading config.ini is failure : {}'.format(e))
            self.utility.write_log(40, 'Reading config.ini is failure : {}'.format(e))
            sys.exit(1)

    # Check product version.
    def check_version(self, default_ver, version_pattern, response):
        """Extract a version from `response`, falling back to `default_ver`.

        NOTE(review): the pattern must contain at least TWO capture groups
        (`obj_match.re.groups > 1`, value taken from group(2)); single-group
        signatures silently return the default — confirm signature format.
        """
        # Check version.
        version = default_ver
        if version_pattern != '*':
            obj_match = re.search(version_pattern, response, flags=re.IGNORECASE)
            if obj_match is not None and obj_match.re.groups > 1:
                version = obj_match.group(2)
        return version

    # Examine HTTP response.
    def examine_response(self, check_pattern, default_ver, version_pattern, response):
        """Return [matched?, version] for a signature against an HTTP response."""
        self.utility.print_message(NOTE, 'Confirm string matching.')
        self.utility.write_log(20, '[In] Confirm string matching [{}].'.format(self.file_name))

        # Check existing contents.
        result = []
        if check_pattern != '*' and re.search(check_pattern, response, flags=re.IGNORECASE) is not None:
            result.append(True)
            # Check product version.
+ result.append(self.check_version(default_ver, version_pattern, response)) + elif check_pattern == '*': + result.append(True) + # Check product version. + result.append(self.check_version(default_ver, version_pattern, response)) + else: + result.append(False) + result.append(default_ver) + return result + + def execute_google_hack(self, cve_explorer, fqdn, path, report): + self.utility.print_message(NOTE, 'Execute Google hack.') + self.utility.write_log(20, '[In] Execute Google hack [{}].'.format(self.file_name)) + + # Open signature file. + signature_file = os.path.join(self.signature_dir, self.signature_file) + product_list = [] + with codecs.open(signature_file, 'r', encoding='utf-8') as fin: + signatures = fin.readlines() + + # Execute Google search. + for idx, signature in enumerate(signatures): + items = signature.replace('\n', '').replace('\r', '').split('@') + if len(items) != 8: + self.utility.print_message(WARNING, 'Invalid signature: {}'.format(signature)) + continue + category = items[0] + vendor = items[1].lower() + product_name = items[2].lower() + default_ver = items[3] + search_option = items[4] + check_pattern = items[5] + version_pattern = items[6] + is_login = items[7] + query = 'site:' + fqdn + ' ' + search_option + date = self.utility.get_current_date('%Y%m%d%H%M%S%f')[:-3] + print_date = self.utility.transform_date_string( + self.utility.transform_date_object(date[:-3], '%Y%m%d%H%M%S')) + + # Execute. + urls, result_count = self.custom_search(query, self.start_index) + + msg = '{}/{} Execute query: {}'.format(idx + 1, len(signature), query) + self.utility.print_message(OK, msg) + self.utility.write_log(20, msg) + + if result_count != 0: + if check_pattern != '*' or version_pattern != '*': + for url_idx, target_url in enumerate(urls): + # Get HTTP response (header + body). 
+ date = self.utility.get_current_date('%Y%m%d%H%M%S%f')[:-3] + res, server_header, res_header, res_body = self.utility.send_request('GET', target_url) + msg = '{}/{} Accessing : Status: {}, Url: {}'.format(url_idx + 1, + len(urls), + res.status, + target_url) + self.utility.print_message(OK, msg) + self.utility.write_log(20, msg) + + # Write log. + log_name = 'google_custom_search_' + fqdn + '_' + date + '.log' + log_path_fqdn = os.path.join(os.path.join(self.root_path, 'logs'), + fqdn + '_' + path.replace('/', '')) + if os.path.exists(log_path_fqdn) is False: + os.mkdir(log_path_fqdn) + log_file = os.path.join(log_path_fqdn, log_name) + with codecs.open(log_file, 'w', 'utf-8') as fout: + fout.write(target_url + '\n\n' + res_header + res_body) + + # Examine HTTP response. + result = self.examine_response(check_pattern, + default_ver, + version_pattern, + res_header + res_body) + + if result[0] is True: + # Found unnecessary content or CMS admin page. + product = [category, vendor, product_name, result[1], target_url] + product = cve_explorer.cve_explorer([product]) + product_list.extend(product) + msg = 'Find product={}/{}, verson={}, trigger={}'.format(vendor, product_name, + default_ver, target_url) + self.utility.print_message(OK, msg) + self.utility.write_log(20, msg) + + # Create report. + page_type = {} + if is_login == '1': + page_type = {'ml': {'prob': '-', 'reason': '-'}, + 'url': {'prob': '100%', 'reason': target_url}} + report.create_report_body(target_url, fqdn, path, target_url, '*', self.method_name, + product, page_type, [], [], server_header, log_file, print_date) + + time.sleep(self.delay_time_direct_access) + else: + # Found search result. 
+ product = [category, vendor, product_name, default_ver, query] + product = cve_explorer.cve_explorer([product]) + product_list.append(product) + msg = 'Detected default content: {}/{}'.format(vendor, product_name) + self.utility.print_message(OK, msg) + self.utility.write_log(20, msg) + + page_type = {} + if is_login == 1: + page_type = {'ml': {'prob': '-', 'reason': '-'}, + 'url': {'prob': '100%', 'reason': search_option}} + report.create_report_body('-', fqdn, '*', '*', self.method_name, product, page_type, + [], [], '*', '*', print_date) + + time.sleep(self.delay_time) + self.utility.write_log(20, '[Out] Execute Google custom search [{}].'.format(self.file_name)) + return product_list + + # APIのアクセスはIPで制限 + # 制限の設定はGCP consoleで実施。 + def custom_search(self, query, start_index=1): + # Google Custom Search API. + self.utility.write_log(20, '[In] Execute Google custom search [{}].'.format(self.file_name)) + + # Setting of Google Custom Search. + service = build("customsearch", "v1", developerKey=self.api_key) + response = [] + urls = [] + result_count = 0 + + # Execute search. + try: + response.append(service.cse().list( + q=query, + cx=self.search_engine_id, + num=10, + start=self.start_index + ).execute()) + + # Get finding counts. + result_count = int(response[0].get('searchInformation').get('totalResults')) + + # Get extracted link (url). 
            if result_count != 0:
                items = response[0]['items']
                for item in items:
                    urls.append(item['link'])

        except Exception as e:
            # Best-effort: API/quota errors are logged and whatever was
            # gathered so far is returned.
            msg = 'Google custom search is failure : {}'.format(e)
            self.utility.print_exception(e, msg)
            self.utility.write_log(30, msg)
            self.utility.write_log(20, '[Out] Execute Google custom search [{}].'.format(self.file_name))
            return urls, result_count

        self.utility.write_log(20, '[Out] Execute Google custom search [{}].'.format(self.file_name))
        return urls, result_count
diff --git a/modules/Gyoi_PageTypeChecker.py b/modules/Gyoi_PageTypeChecker.py new file mode 100644 index 0000000..f338085 --- /dev/null +++ b/modules/Gyoi_PageTypeChecker.py @@ -0,0 +1,143 @@
#!/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import codecs
import re
import configparser
import pickle
from .NaiveBayes import NaiveBayes

# Type of printing.
OK = 'ok'          # [*]
NOTE = 'note'      # [+]
FAIL = 'fail'      # [-]
WARNING = 'warn'   # [!]
NONE = 'none'      # No label.


# Classifies pages (e.g. login pages) by ML over the response body and by URL signatures.
class PageChecker:
    def __init__(self, utility):
        # Read config.ini.
        # utility: project-wide helper providing logging/printing.
        self.utility = utility
        config = configparser.ConfigParser()
        self.file_name = os.path.basename(__file__)
        self.full_path = os.path.dirname(os.path.abspath(__file__))
        self.root_path = os.path.join(self.full_path, '../')
        config.read(os.path.join(self.root_path, 'config.ini'))

        try:
            self.train_path = os.path.join(self.full_path, config['PageChecker']['train_path'])
            self.train_file = os.path.join(self.train_path, config['PageChecker']['train_page'])
            self.trained_path = os.path.join(self.full_path, config['PageChecker']['trained_path'])
            self.trained_file = os.path.join(self.trained_path, config['PageChecker']['trained_page'])
            self.signatures_dir = os.path.join(self.root_path, config['Common']['signature_path'])
            self.signature_file = os.path.join(self.signatures_dir, config['PageChecker']['signature_file'])
        except Exception as e:
            self.utility.print_message(FAIL, 'Reading config.ini is failure : {}'.format(e))
            self.utility.write_log(40, 'Reading config.ini is failure : {}'.format(e))
            sys.exit(1)

    # Judge page type.
    def judge_page_type(self, target_url, response):
        """Classify a page via Naive Bayes (body) and URL signatures.

        :return: dict {'ml': {...}, 'url': {...}} on success.
            NOTE(review): returns the string 'unknown' when training fails,
            which differs in type from the normal dict result — callers must
            handle both; confirm intended contract.
        """
        self.utility.print_message(NOTE, 'Judge page type.')
        self.utility.write_log(20, '[In] Judge page type [{}].'.format(self.file_name))
        # page_type = {'ml': {'type': 'unknown', 'reason': '-'}, 'url': {'type': 'unknown', 'reason': '-'}}
        page_type = {'ml': {'prob': '-', 'reason': '-'}, 'url': {'prob': '-', 'reason': '-'}}

        # Learning.
        nb = self.train(self.train_file, self.trained_file)
        if nb is None:
            self.utility.write_log(20, '[Out] Judge page type [{}].'.format(self.file_name))
            return 'unknown'

        # Predict page type using Naive Bayes.
        # Classify the response body; keywords are the tokens that drove the decision.
        predict_result, prob, keywords, classified_list = nb.classify(response)
        if len(keywords) == 0:
            self.utility.print_message(OK, 'Page type is unknown.')
        else:
            page_type['ml']['prob'] = str(round(prob*100, 2))
            page_type['ml']['reason'] = ','.join(keywords)
            msg = 'ML: Page type={}/{}%, reason={}'.format(predict_result,
                                                           round(prob*100, 2),
                                                           page_type['ml']['reason'])
            self.utility.print_message(OK, msg)
            self.utility.write_log(20, msg)

        # Predict page type using URL.
        predict_result, page_type['url']['prob'], page_type['url']['reason'] = self.predict_page_type(target_url)
        msg = 'URL: Page type={}/{}%, reason={}'.format(predict_result,
                                                        page_type['url']['prob'],
                                                        page_type['url']['reason'])
        self.utility.print_message(OK, msg)
        self.utility.write_log(20, msg)

        self.utility.write_log(20, '[Out] Judge page type [{}].'.format(self.file_name))
        return page_type

    # Predict page type using URL.
    def predict_page_type(self, target_url):
        """Match the URL against the page-type signature file.

        :return: (page_type, probability-string, reason). Falls back to
            ('Login', '0.0', '-') when no signature matches or on error.
        """
        self.utility.write_log(20, '[In] Predict page type [{}].'.format(self.file_name))
        try:
            # Identify product name and version.
            with codecs.open(self.signature_file, 'r', 'utf-8') as fin:
                matching_patterns = fin.readlines()
                for pattern in matching_patterns:
                    # Signature format: page_type@regex
                    items = pattern.replace('\r', '').replace('\n', '').split('@')
                    page_type = items[0]
                    signature = items[1]
                    obj_match = re.search(signature, target_url, flags=re.IGNORECASE)

                    # Judge page type.
                    if obj_match is not None:
                        # First matching signature wins.
                        msg = 'Identify page type : page type={}/100%, url={}'.format(page_type, target_url)
                        self.utility.print_message(OK, msg)
                        self.utility.write_log(20, msg)
                        self.utility.write_log(20, '[Out] Predict page type [{}].'.format(self.file_name))
                        return page_type, '100.0', obj_match.group(1)

        except Exception as e:
            msg = 'Prediction page type is failure : {}'.format(e)
            self.utility.print_exception(e, msg)
            self.utility.write_log(30, msg)
        self.utility.write_log(20, '[Out] Predict page type [{}].'.format(self.file_name))
        return 'Login', '0.0', '-'

    # Execute learning / Get learned data.
    def train(self, in_file, out_file):
        """Load a pickled NaiveBayes model, or train and pickle one.

        :param in_file: training data (lines of 'category@text').
        :param out_file: pickle cache path.
        :return: NaiveBayes instance, or None on failure.
        """
        self.utility.write_log(20, '[In] Train model [{}].'.format(self.file_name))
        nb = None
        try:
            # If existing learned data (pkl), load learned data.
            if os.path.exists(out_file):
                msg = 'Load trained file : {}'.format(out_file)
                self.utility.print_message(OK, msg)
                self.utility.write_log(20, msg)
                with open(out_file, 'rb') as fin:
                    nb = pickle.load(fin)
            # If no learned data, execute learning.
            else:
                msg = 'Train model : {}'.format(in_file)
                self.utility.print_message(OK, msg)
                self.utility.write_log(20, msg)
                nb = NaiveBayes()
                fin = codecs.open(in_file, 'r', 'utf-8')
                lines = fin.readlines()
                fin.close()
                items = []

                for line in lines:
                    # NOTE(review): line[:-2] assumes CRLF line endings; LF-only
                    # files would lose their last character — confirm data format.
                    words = line[:-2]
                    train_words = words.split('@')
                    items.append(train_words[1])
                    nb.train(train_words[1], train_words[0])

                # Save learned data to pkl file.
                with open(out_file, 'wb') as f:
                    pickle.dump(nb, f)
        except Exception as e:
            # On any failure nb stays None; caller treats that as "unknown".
            msg = 'Training model is failure : {}'.format(e)
            self.utility.print_exception(e, msg)
            self.utility.write_log(30, msg)
        self.utility.write_log(20, '[Out] Train model [{}].'.format(self.file_name))
        return nb
diff --git a/modules/Gyoi_Report.py b/modules/Gyoi_Report.py new file mode 100644 index 0000000..a972236 --- /dev/null +++ b/modules/Gyoi_Report.py @@ -0,0 +1,154 @@
#!/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import copy
import glob
import configparser
import pandas as pd
from jinja2 import Environment, FileSystemLoader

# Type of printing.
OK = 'ok'          # [*]
NOTE = 'note'      # [+]
FAIL = 'fail'      # [-]
WARNING = 'warn'   # [!]
NONE = 'none'      # No label.


# Create report.
class CreateReport:
    def __init__(self, utility):
        self.utility = utility
        # Read config file.
        config = configparser.ConfigParser()
        self.file_name = os.path.basename(__file__)
        self.full_path = os.path.dirname(os.path.abspath(__file__))
        self.root_path = os.path.join(self.full_path, '../')
        config.read(os.path.join(self.root_path, 'config.ini'))

        try:
            self.report_dir = os.path.join(self.root_path, config['Report']['report_path'])
            self.report_path = os.path.join(self.report_dir, config['Report']['report_name'])
            self.report_path_exploit = os.path.join(self.report_dir, config['Report']['report_name_exploit'])
            self.report_temp = config['Report']['report_temp']
            self.template = config['Report']['template']
            # CSV column names, '@'-separated in config.ini.
            self.header = str(config['Report']['header']).split('@')

        except Exception as e:
            self.utility.print_message(FAIL, 'Reading config.ini is failure : {}'.format(e))
            self.utility.write_log(40, 'Reading config.ini is failure : {}'.format(e))
            sys.exit(1)

    # Create report's header.
    def create_report_header(self, fqdn, path):
        """Write an empty CSV containing only the header row (overwrites)."""
        self.utility.print_message(NOTE, 'Create report header : {}'.format(self.report_path))
        self.utility.write_log(20, '[In] Create report header [{}].'.format(self.file_name))

        # The '*' in the configured report name is a per-host placeholder.
        report_file_name = self.report_path.replace('*', fqdn + '_' + path)
        pd.DataFrame([], columns=self.header).to_csv(report_file_name, mode='w', index=False)
        self.utility.write_log(20, '[Out] Create report header [{}].'.format(self.file_name))

    # Create report's body.
    # NOTE(review): parameter `type` shadows the builtin; kept because callers
    # pass it positionally and renaming would be cosmetic only.
    def create_report_body(self, url, fqdn, path, port, cloud, method, products, type, comments, errors, srv_header, log_file, date):
        """Append one base record plus one row per product/comment/error."""
        self.utility.print_message(NOTE, 'Create {}:{} report\'s body.'.format(fqdn, port))
        self.utility.write_log(20, '[In] Create report body [{}].'.format(self.file_name))

        # Build base structure.
        report = []
        login_prob = ''
        login_reason = ''
        if len(type) != 0:
            login_prob = 'Log : ' + type['ml']['prob'] + ' %\n' + 'Url : ' + type['url']['prob'] + ' %'
            login_reason = 'Log : ' + type['ml']['reason'] + '\n' + 'Url : ' + type['url']['reason']
        else:
            login_prob = '*'
            login_reason = '*'
        record = []
        record.insert(0, fqdn)                                # FQDN.
        record.insert(1, self.utility.forward_lookup(fqdn))   # IP address.
        record.insert(2, str(port))                           # Port number.
        record.insert(3, cloud)                               # Cloud service type.
        record.insert(4, method)                              # Using method.
        record.insert(5, url)                                 # Target URL.
        record.insert(6, '-')                                 # Vendor name.
        record.insert(7, '-')                                 # Product name.
        record.insert(8, '-')                                 # Product version.
        record.insert(9, '-')                                 # Trigger of identified product.
        record.insert(10, '-')                                # Product category.
        record.insert(11, '-')                                # CVE number of product.
        record.insert(12, login_prob)                         # Login probability.
        record.insert(13, login_reason)                       # Trigger of login page.
        record.insert(14, '-')                                # Unnecessary comments.
        record.insert(15, '-')                                # Unnecessary Error messages.
        record.insert(16, srv_header)                         # Server header.
+ record.insert(17, log_file) # Path of log file. + record.insert(18, date) # Creating date. + report.append(record) + + # Build prduct record. + for product in products: + product_record = copy.deepcopy(record) + product_record[6] = product[1] + product_record[7] = product[2] + product_record[8] = product[3] + product_record[9] = product[4] + product_record[10] = product[0] + product_record[11] = product[5] + report.append(product_record) + + # Build comment record. + for comment in comments: + comment_record = copy.deepcopy(record) + comment_record[14] = comment + report.append(comment_record) + + # Build error message record. + for error in errors: + error_record = copy.deepcopy(record) + error_record[15] = error + report.append(error_record) + + # Output report. + msg = 'Create report : {}'.format(self.report_path) + self.utility.print_message(OK, msg) + self.utility.write_log(20, msg) + report_file_name = self.report_path.replace('*', fqdn + '_' + path) + pd.DataFrame(report).to_csv(report_file_name, mode='a', header=False, index=False) + + self.utility.write_log(20, '[Out] Create report body [{}].'.format(self.file_name)) + + def create_exploit_report(self): + # Gather reporting items. + csv_file_list = glob.glob(os.path.join(self.report_dir, '*.csv')) + + # Create DataFrame. 
+ content_list = [] + try: + for file in csv_file_list: + content_list.append(pd.read_csv(file, names=self.header, sep=',')) + df_csv = pd.concat(content_list).drop_duplicates().sort_values(by=['ip', 'port'], ascending=True).reset_index(drop=True, col_level=1) + except Exception as e: + self.utility.print_message(FAIL, 'Invalid file error: {}'.format(e)) + return + + items = [] + for idx in range(len(df_csv)): + items.append({'ip_addr': df_csv.loc[idx, 'ip'], + 'port': df_csv.loc[idx, 'port'], + 'prod_name': df_csv.loc[idx, 'service'], + 'vuln_name': df_csv.loc[idx, 'vuln_name'], + 'type': df_csv.loc[idx, 'type'], + 'description': df_csv.loc[idx, 'description'], + 'exploit': df_csv.loc[idx, 'exploit'], + 'target': df_csv.loc[idx, 'target'], + 'payload': df_csv.loc[idx, 'payload'], + 'ref': str(df_csv.loc[idx, 'reference']).replace('@', '
')}) + + # Setting template. + env = Environment(loader=FileSystemLoader(self.report_dir)) + template = env.get_template(self.template) + pd.set_option('display.max_colwidth', -1) + html = template.render({'title': 'GyoiThon Scan Report', 'items': items}) + with open(self.report_path_exploit, 'w') as fout: + fout.write(html) diff --git a/modules/Gyoi_Spider.py b/modules/Gyoi_Spider.py new file mode 100644 index 0000000..e75d83a --- /dev/null +++ b/modules/Gyoi_Spider.py @@ -0,0 +1,56 @@ +#!/usr/bin/python +# coding:utf-8 +import os +import time +import codecs +import scrapy +from scrapy.http import Request + + +class SimpleSpider(scrapy.Spider): + name = 'simple_spider' + + def __init__(self, category=None, *args, **kwargs): + super(SimpleSpider, self).__init__(*args, **kwargs) + self.start_urls = getattr(self, 'target_url', None) + self.allowed_domains = [getattr(self, 'allow_domain', None)] + self.depth_limit = int(getattr(self, 'depth_limit', None)) + self.delay_time = float(getattr(self, 'delay', None)) + self.store_path = getattr(self, 'store_path', None) + self.response_log = getattr(self, 'response_log', None) + self.custom_settings = { + 'DEPTH_LIMIT ': self.depth_limit, + 'DOWNLOAD_DELAY': self.delay_time, + 'ROBOTSTXT_OBEY': True, + 'FEED_EXPORT_ENCODING': 'utf-8' + } + log_file = os.path.join(self.store_path, self.response_log) + self.fout = codecs.open(log_file, 'w', encoding='utf-8') + + def start_requests(self): + url = self.start_urls + yield Request(url, self.parse) + + def parse(self, response): + self.fout.write(response.body.decode('utf-8')) + for href in response.css('a::attr(href)'): + full_url = response.urljoin(href.extract()) + time.sleep(self.delay_time) + yield scrapy.Request(full_url, callback=self.parse_item) + #for src in response.css('script::attr(src)'): + # full_url = response.urljoin(src.extract()) + # time.sleep(self.delay_time) + # yield scrapy.Request(full_url, callback=self.parse_item) + + def parse_item(self, response): + urls 
= [] + self.fout.write(response.body.decode('utf-8')) + for href in response.css('a::attr(href)'): + full_url = response.urljoin(href.extract()) + urls.append(full_url) + #for src in response.css('script::attr(src)'): + # full_url = response.urljoin(src.extract()) + # urls.append(full_url) + yield { + 'urls': urls, + } diff --git a/modules/Gyoi_SpiderControl.py b/modules/Gyoi_SpiderControl.py new file mode 100644 index 0000000..4377ed1 --- /dev/null +++ b/modules/Gyoi_SpiderControl.py @@ -0,0 +1,91 @@ +#!/bin/env python +# -*- coding: utf-8 -*- +import os +import sys +import codecs +import json +import configparser +from urllib3 import util +from subprocess import Popen + +# Type of printing. +OK = 'ok' # [*] +NOTE = 'note' # [+] +FAIL = 'fail' # [-] +WARNING = 'warn' # [!] +NONE = 'none' # No label. + + +class SpiderControl: + def __init__(self, utility): + # Read config.ini. + self.utility = utility + config = configparser.ConfigParser() + self.file_name = os.path.basename(__file__) + self.full_path = os.path.dirname(os.path.abspath(__file__)) + self.root_path = os.path.join(self.full_path, '../') + config.read(os.path.join(self.root_path, 'config.ini')) + + try: + self.output_base_path = config['Spider']['output_base_path'] + self.store_path = os.path.join(self.full_path, self.output_base_path) + self.output_filename = config['Spider']['output_filename'] + self.spider_depth_limit = config['Spider']['depth_limit'] + self.spider_delay_time = config['Spider']['delay_time'] + except Exception as e: + self.utility.print_message(FAIL, 'Reading config.ini is failure : {}'.format(e)) + self.utility.write_log(40, 'Reading config.ini is failure : {}'.format(e)) + sys.exit(1) + + if os.path.exists(self.store_path) is False: + os.mkdir(self.store_path) + + # Running spider. + def run_spider(self, protocol, target_ip, target_port, target_path): + self.utility.write_log(20, '[In] Run spider [{}].'.format(self.file_name)) + + # Execute crawling using Scrapy. 
+ all_targets_log = [] + target_url = protocol + '://' + target_ip + ':' + target_port + target_path + target_log = [target_url] + now_time = self.utility.get_current_date('%Y%m%d%H%M%S') + response_log = protocol + '_' + target_ip + '_' + target_port + '_' + now_time + '.log' + result_dir = os.path.join(self.utility.modules_dir, self.output_base_path) + result_path = os.path.join(result_dir, now_time + self.output_filename) + option = ' -a target_url=' + target_url + ' -a allow_domain=' + target_ip + \ + ' -a depth_limit=' + self.spider_depth_limit + ' -a delay=' + self.spider_delay_time + \ + ' -a store_path=' + self.store_path + ' -a response_log=' + response_log + ' -o ' + result_path + spider_path = os.path.join(self.full_path, 'Gyoi_Spider.py') + command = 'scrapy runspider ' + spider_path + option + msg = 'Execute spider : {}.'.format(command) + self.utility.print_message(OK, msg) + self.utility.write_log(20, msg) + proc = Popen(command, shell=True) + proc.wait() + + # Get crawling result. + dict_json = {} + if os.path.exists(result_path): + with codecs.open(result_path, 'r', encoding='utf-8') as fin: + target_text = self.utility.delete_ctrl_char(fin.read()) + if target_text != '': + dict_json = json.loads(target_text) + else: + self.utility.print_message(WARNING, '[{}] is empty.'.format(result_path)) + + # Exclude except allowed domains. 
+ for idx in range(len(dict_json)): + items = dict_json[idx]['urls'] + for item in items: + try: + if target_ip == util.parse_url(item).host: + target_log.append(item) + except Exception as e: + msg = 'Excepting allowed domain is failure : {}'.format(e) + self.utility.print_message(FAIL, msg) + self.utility.write_log(30, msg) + + self.utility.write_log(20, 'Get spider result.') + all_targets_log.append([target_url, os.path.join(self.store_path, response_log), list(set(target_log))]) + self.utility.write_log(20, '[Out] Run spider [{}].'.format(self.file_name)) + return all_targets_log diff --git a/modules/Gyoi_VersionChecker.py b/modules/Gyoi_VersionChecker.py new file mode 100644 index 0000000..765366c --- /dev/null +++ b/modules/Gyoi_VersionChecker.py @@ -0,0 +1,85 @@ +#!/bin/env python +# -*- coding: utf-8 -*- +import os +import sys +import codecs +import re +import configparser + +# Type of printing. +OK = 'ok' # [*] +NOTE = 'note' # [+] +FAIL = 'fail' # [-] +WARNING = 'warn' # [!] +NONE = 'none' # No label. + + +class VersionChecker: + def __init__(self, utility): + # Read config.ini. + self.utility = utility + config = configparser.ConfigParser() + self.file_name = os.path.basename(__file__) + self.full_path = os.path.dirname(os.path.abspath(__file__)) + self.root_path = os.path.join(self.full_path, '../') + config.read(os.path.join(self.root_path, 'config.ini')) + + try: + self.signatures_dir = os.path.join(self.root_path, config['Common']['signature_path']) + self.signature_file = os.path.join(self.signatures_dir, config['VersionChecker']['signature_file']) + except Exception as e: + self.utility.print_message(FAIL, 'Reading config.ini is failure : {}'.format(e)) + self.utility.write_log(40, 'Reading config.ini is failure : {}'.format(e)) + sys.exit(1) + + # Identify product name using signature. 
+ def identify_product(self, response): + self.utility.write_log(20, '[In] Identify product [{}].'.format(self.file_name)) + product_list = [] + + try: + # Identify product name and version. + with codecs.open(self.signature_file, 'r', 'utf-8') as fin: + matching_patterns = fin.readlines() + for pattern in matching_patterns: + items = pattern.replace('\r', '').replace('\n', '').split('@') + category = items[0] + vendor = items[1].lower() + product = items[2].lower() + default_ver = items[3] + signature = items[4] + obj_match = re.search(signature, response, flags=re.IGNORECASE) + if obj_match is not None: + trigger = obj_match.group(1) + + # Check version. + version = default_ver + if obj_match.re.groups > 1: + version = obj_match.group(2) + + # Add product name and version. + product_list.append([category, vendor, product, version, trigger]) + msg = 'Find product={}/{}, verson={}, trigger={}'.format(vendor, product, version, trigger) + self.utility.print_message(OK, msg) + self.utility.write_log(20, msg) + except Exception as e: + msg = 'Identifying product is failure : {}'.format(e) + self.utility.print_exception(e, msg) + self.utility.write_log(30, msg) + + self.utility.write_log(20, '[Out] Identify product [{}].'.format(self.file_name)) + return list(map(list, set(map(tuple, product_list)))) + + # Classifier product name using signatures. + def get_product_name(self, response): + self.utility.print_message(NOTE, 'Analyzing gathered HTTP response.') + self.utility.write_log(20, '[In] Analyzing gathered HTTP response [{}].'.format(self.file_name)) + + # Execute classifier. 
+ product_list = self.identify_product(response) + if len(product_list) == 0: + self.utility.print_message(WARNING, 'Product Not Found.') + self.utility.write_log(30, 'Product Not Found.') + + self.utility.write_log(20, '[Out] Analyzing gathered HTTP response [{}].'.format(self.file_name)) + return product_list diff --git a/modules/Gyoi_VersionCheckerML.py b/modules/Gyoi_VersionCheckerML.py new file mode 100644 index 0000000..1575aad --- /dev/null +++ b/modules/Gyoi_VersionCheckerML.py @@ -0,0 +1,128 @@ +#!/bin/env python +# -*- coding: utf-8 -*- +import os +import sys +import codecs +import configparser +import pickle +from .NaiveBayes import NaiveBayes + +# Type of printing. +OK = 'ok' # [*] +NOTE = 'note' # [+] +FAIL = 'fail' # [-] +WARNING = 'warn' # [!] +NONE = 'none' # No label. + + +class VersionCheckerML: + def __init__(self, utility): + # Read config.ini. + self.utility = utility + config = configparser.ConfigParser() + self.file_name = os.path.basename(__file__) + self.full_path = os.path.dirname(os.path.abspath(__file__)) + self.root_path = os.path.join(self.full_path, '../') + try: + config.read(os.path.join(self.root_path, 'config.ini')) + except Exception as e: + self.utility.print_message(FAIL, 'Reading config.ini is failure : {}'.format(e)) + self.utility.write_log(40, 'Reading config.ini is failure : {}'.format(e)) + sys.exit(1) + + self.category_type = config['VersionCheckerML']['category'] + self.train_path = os.path.join(self.full_path, config['VersionCheckerML']['train_path']) + self.trained_path = os.path.join(self.full_path, config['VersionCheckerML']['trained_path']) + self.train_os_in = os.path.join(self.train_path, config['VersionCheckerML']['train_os_in']) + self.train_os_out = os.path.join(self.trained_path, config['VersionCheckerML']['train_os_out']) + self.train_web_in = os.path.join(self.train_path, config['VersionCheckerML']['train_web_in']) + self.train_web_out = os.path.join(self.trained_path, 
config['VersionCheckerML']['train_web_out']) + self.train_framework_in = os.path.join(self.train_path, config['VersionCheckerML']['train_framework_in']) + self.train_framework_out = os.path.join(self.trained_path, config['VersionCheckerML']['train_framework_out']) + self.train_cms_in = os.path.join(self.train_path, config['VersionCheckerML']['train_cms_in']) + self.train_cms_out = os.path.join(self.trained_path, config['VersionCheckerML']['train_cms_out']) + return + + # Identify product name using ML. + def identify_product(self, response): + self.utility.write_log(20, '[In] Identify product [{}].'.format(self.file_name)) + product_list = [] + + try: + # Predict product name each category (OS, Middleware, CMS..). + list_category = self.category_type.split('@') + for category in list_category: + # Learning. + nb = None + if category == 'OS': + nb = self.train(self.train_os_in, self.train_os_out) + elif category == 'WEB': + nb = self.train(self.train_web_in, self.train_web_out) + elif category == 'FRAMEWORK': + nb = self.train(self.train_framework_in, self.train_framework_out) + elif category == 'CMS': + nb = self.train(self.train_cms_in, self.train_cms_out) + else: + self.utility.print_message(FAIL, 'Choose category is not found.') + exit(1) + + # Predict product name. + product, prob, keyword_list, classified_list = nb.classify(response) + + # Output result of prediction (body). + # If no feature, result is unknown. 
+ if len(keyword_list) != 0: + product_list.append([category, '*', product, '*', ','.join(keyword_list)]) + msg = 'Predict product={}/{}%, verson={}, trigger={}'.format(product, prob, '*', keyword_list) + self.utility.print_message(OK, msg) + self.utility.write_log(20, msg) + self.utility.print_message(NOTE, 'category : {}'.format(category)) + except Exception as e: + msg = 'Identifying product is failure : {}'.format(e) + self.utility.print_exception(e, msg) + self.utility.write_log(30, msg) + + self.utility.write_log(20, '[Out] Identify product [{}].'.format(self.file_name)) + return list(map(list, set(map(tuple, product_list)))) + + # Classifier product name using Machine Learning. + def get_product_name(self, response): + self.utility.print_message(NOTE, 'Analyzing gathered HTTP response using ML.') + self.utility.write_log(20, '[In] Analyzing gathered HTTP response [{}].'.format(self.file_name)) + + # Execute classifier. + product_list = self.identify_product(response) + if len(product_list) == 0: + self.utility.print_message(WARNING, 'Product Not Found.') + self.utility.write_log(30, 'Product Not Found.') + + self.utility.write_log(20, '[Out] Analyzing gathered HTTP response [{}].'.format(self.file_name)) + return product_list + + # Execute learning / Get learned data. + def train(self, in_file, out_file): + self.utility.write_log(20, '[In] Train/Get learned data [{}].'.format(self.file_name)) + # If existing learned data (pkl), load learned data. + nb = None + if os.path.exists(out_file): + with open(out_file, 'rb') as f: + nb = pickle.load(f) + # If no learned data, execute learning. + else: + # Read learning data. + nb = NaiveBayes() + with codecs.open(in_file, 'r', 'utf-8') as fin: + lines = fin.readlines() + items = [] + + for line in lines: + words = line[:-2] + train_words = words.split('@') + items.append(train_words[1]) + nb.train(train_words[3], train_words[0]) + + # Save learned data to pkl file. 
+ with open(out_file, 'wb') as f: + pickle.dump(nb, f) + self.utility.write_log(20, '[Out] Train/Get learned data [{}].'.format(self.file_name)) + return nb diff --git a/modules/NaiveBayes.py b/modules/NaiveBayes.py new file mode 100644 index 0000000..b50fb16 --- /dev/null +++ b/modules/NaiveBayes.py @@ -0,0 +1,88 @@ +#!/bin/env python +# -*- coding: utf-8 -*- +import os +import sys +import math +import re + + +class NaiveBayes: + def __init__(self): + self.vocabularies = set() + self.word_count = {} + self.category_count = {} + self.file_name = os.path.basename(__file__) + + # Count up word (Create Bag-of-Words). + def word_count_up(self, word, category): + self.word_count.setdefault(category, {}) + self.word_count[category].setdefault(word, 0) + self.word_count[category][word] += 1 + self.vocabularies.add(word) + + # Count up category number. + def category_count_up(self, category): + self.category_count.setdefault(category, 0) + self.category_count[category] += 1 + + # Learning based on keyword and category. + def train(self, doc, category): + # Count each category. + self.word_count_up(doc, category) + # Count category number. + self.category_count_up(category) + + # Calculate prior probability of Bayes. + def prior_prob(self, category): + num_of_categories = sum(self.category_count.values()) + num_of_docs_of_the_category = self.category_count[category] + return float(num_of_docs_of_the_category) / float(num_of_categories) + + # Count number of appearance. + def num_of_appearance(self, word, category): + word_count = 0 + keyword_list = [] + for key_item in self.word_count[category]: + list_match = re.findall(key_item, word, flags=re.IGNORECASE) + if len(list_match) != 0: + word_count += 1 + for item in list_match: + keyword_list.append(item) + prob = float(word_count) / float(len(self.word_count[category])) + return word_count, list(set(keyword_list)), prob + + # Calculate Bayes. 
+ def word_prob(self, word, category): + numerator, keyword_list, temp_prob = self.num_of_appearance(word, category) + # Laplace smoothing. + numerator += 1 + denominator = sum(self.word_count[category].values()) + len(self.vocabularies) + prob = float(numerator) / float(denominator) + return prob, keyword_list, temp_prob + + # Calculate score. + def score(self, word, category): + score = math.log(self.prior_prob(category)) + prob, keyword_list, temp_prob = self.word_prob(word, category) + score += math.log(prob) + return score, prob, keyword_list, temp_prob + + # Execute classify. + def classify(self, doc): + best_guessed_category = None + max_prob_before = -sys.maxsize + keyword_list = [] + classified_list = [] + + # Calculate score each category. + for category in self.category_count.keys(): + score, total_prob, feature_list, category_prob = self.score(doc, category) + classified_list.append([category, float(total_prob), feature_list]) + + # Classify word to highest score's category. 
+ if score > max_prob_before: + max_prob_before = score + best_guessed_category = category + keyword_list = feature_list + classified_prob = total_prob + return best_guessed_category, float(classified_prob), keyword_list, classified_list diff --git a/modules/crawl_result/README b/modules/crawl_result/README new file mode 100644 index 0000000..3ed9bb7 --- /dev/null +++ b/modules/crawl_result/README @@ -0,0 +1 @@ +### README diff --git a/modules/data/conversion_table.csv b/modules/data/conversion_table.csv new file mode 100644 index 0000000..7bb6a2f --- /dev/null +++ b/modules/data/conversion_table.csv @@ -0,0 +1,43 @@ +nvd_vendor,nvd_name,metasploit +tiki,tikiwiki_cms\/groupware,tikiwiki +wordpress,wordpress,wordpress +sixapart,movabletype,movabletype +lockon,ec-cube,ec-cube +xoops,xoops,xoops +drupal,drupal,drupal +joomla,joomla\!,joomla +typo3,typo3,typo3 +site_publis,site_publis,site_publis +oscommerce,oscommerce,oscommerce +apache,tomcat,tomcat +apache,coyote_http_connector,coyote +apache,struts,struts +adobe,coldfusion,coldfusion +bea_systems,webLogic,weblogic +jboss,jboss,jboss +oracle,application_server,oracle +redhat,enterprise_linux,redhat +ubuntu,ubuntu_linux,ubuntu +centos,centos,centos +unix,unix,unix +microsoft,windows,windows +apache,http_server,apache +nginx,nginx,nginx +microsoft,internet_information_server,iis +ibm,http_server,ibm +f5,big-ip,bigip +minihttpserver.net,web_forums_server,minihttp +php,php,php +microsoft,asp.net,asp +phpmyadmin,phpmyadmin,phpmyadmin +squirrelmail,squirrelmail,squirrelmail +openssl,openssl,openssl +mod_ssl,mod_ssl,mod_ssl +bitforest,scutum,scutum +sugarcrm,sugarcrm,sugarcrm +apache,subversion,subversion +roundcube,webmail,webmail +awstats,awstats,awstats +webmin,webmin,webmin +phpbb,phpbb,phpbb +mediawiki,mediawiki,mediawiki diff --git a/modules/train_data/train_cms_in.txt b/modules/train_data/train_cms_in.txt new file mode 100644 index 0000000..68d2eaf --- /dev/null +++ b/modules/train_data/train_cms_in.txt @@ -0,0 +1,6 
@@ +joomla@joomla\!@*@(Set-Cookie: [a-z0-9]{32}=.*); +joomla@joomla\!@*@(Set-Cookie: .*=[a-z0-9]{26,32}); +heartcore@heartcore@*@(Set-Cookie:.*=[A-Z0-9]{32});.* +heartcore@heartcore@*@( +Login@.*( +Login@.*( +Login@.*( diff --git a/modules/trained_data/train_cms_out.pkl b/modules/trained_data/train_cms_out.pkl new file mode 100644 index 0000000..49be486 Binary files /dev/null and b/modules/trained_data/train_cms_out.pkl differ diff --git a/modules/trained_data/train_page_type.pkl b/modules/trained_data/train_page_type.pkl new file mode 100644 index 0000000..9d973c7 Binary files /dev/null and b/modules/trained_data/train_page_type.pkl differ diff --git a/modules/vuln_db/README b/modules/vuln_db/README new file mode 100644 index 0000000..3ed9bb7 --- /dev/null +++ b/modules/vuln_db/README @@ -0,0 +1 @@ +### README diff --git a/report/report_template.html b/report/report_template.html new file mode 100644 index 0000000..a771f84 --- /dev/null +++ b/report/report_template.html @@ -0,0 +1,109 @@ + + + + + {{ title }} + + + + +

GyoiThon scan Report

+ + + + + + + + + {% for item in items %} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + {% endfor %} +
IndexItemValue
{{ loop.index }}IP address{{ item.ip_addr }}
Port number{{ item.port }}
Product name{{ item.prod_name }}
Vuln name{{ item.vuln_name }}
Type{{ item.type }}
Description{{ item.description }}
Exploit module{{ item.exploit }}
Target{{ item.target }}
Payload{{ item.payload }}
Reference{{ item.ref }}
+ + diff --git a/report/~$sample_report.xlsx b/report/~$sample_report.xlsx new file mode 100644 index 0000000..51f89f5 Binary files /dev/null and b/report/~$sample_report.xlsx differ diff --git a/requirements.txt b/requirements.txt index 83b58a3..50ef8e0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,8 @@ +censys==0.0.8 docopt==0.6.2 Jinja2==2.10 msgpack-python==0.5.6 pandas==0.22.0 urllib3==1.22 -Scrapy==1.5.0 \ No newline at end of file +Scrapy==1.5.0 +google-api-python-client==1.7.4 \ No newline at end of file diff --git a/signatures/signature_comment.txt b/signatures/signature_comment.txt new file mode 100644 index 0000000..7e400cc --- /dev/null +++ b/signatures/signature_comment.txt @@ -0,0 +1,18 @@ +HTML Tag@(]*?>) +HTML Tag@(]*?>) +HTML Tag@(]*?>) +HTML Tag@(]*?>) +SQL@(select\s.+from\s.+) +SQL@(update\s+set\s.+) +SQL@(insert\s+into\s.+\(.+\)\s+values\s+\(.+\)) +SQL@(delete\s+from\s.+) +Scheme@(http://.+|https://.+|file://.+|data://.+) +IP Address@((([1-9]?[0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([1-9]?[0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])+) +User ID@(user=|[\"']user[\"']:|user_id=|[\"']user_id[\"']:|id=|[\"']id[\"']:) +User ID@(ユーザ|ユーザID|ユーザーID|ユーザID|ユーザーID) +Password@(password=|[\"']password[\"']:|pass=|[\"']pass[\"']:) +Password@(パスワード) +Session ID@(session_id|sessionid|sessid|session|PHPSESSID|JSESSIONID|ASP\.NET_SessionId) +Postal Code@([0-9]{3}-*[0-9]{4}|[0-9]{5}-*[0-9]{4}|[0-9]{3}-*[0-9]{3}) +Mail Address@([a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)) +Name@(開発者|作成者|修正者|更新者|備考) diff --git a/signatures/signature_default_content.txt b/signatures/signature_default_content.txt new file mode 100644 index 0000000..beb7f0e --- /dev/null +++ b/signatures/signature_default_content.txt @@ -0,0 +1,95 @@ +Web@apache@http_server@*@/server-status@*@Version:.*(Apache/([0-9]+[\.0-9]*[\.0-9]*))@0 +Web@apache@http_server@*@/error/README@*@*@0 +Web@apache@http_server@*@/icons/@*@*@0 +Web@apache@http_server@*@/icons/README@*@*@0 
+Web@apache@http_server@*@/icons/small/README.txt@(These icons are provided as)@*@0 +Web@apache@http_server@*@/manual/@(Apache)@*@0 +Web@apache@http_server@*@/manual/images/@(<title>Apache)@*@0 +Web@apache@http_server@*@/manual/style/@(<title>Apache)@*@0 +CMS@wordpress@wordpress@*@/wp-login.php@*@(ver=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@wordpress@wordpress@*@/wp/wp-login.php@*@(ver=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@wordpress@wordpress@*@/wp-admin/@*@(ver=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@wordpress@wordpress@*@/wp/wp-admin@*@(ver=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@wordpress@wordpress@*@/wp-content/@*@(ver=([0-9]+[\.0-9]*[\.0-9]*))@0 +CMS@wordpress@wordpress@*@/wp-includes/@*@(ver=([0-9]+[\.0-9]*[\.0-9]*))@0 +CMS@wordpress@wordpress@*@/wp-json/@*@(ver=([0-9]+[\.0-9]*[\.0-9]*))@0 +CMS@wordpress@wordpress@*@/wp-json/wp/v2/users@*@(ver=([0-9]+[\.0-9]*[\.0-9]*))@0 +CMS@wordpress@wordpress@*@/xmlrpc.php@(XML-RPC server accepts POST requests only)*@*@0 +CMS@wordpress@wordpress@*@/?author=1@.*(/author/).*.@(ver=([0-9]+[\.0-9]*[\.0-9]*))@0 +CMS@drupal@drupal@*@/user/login@(/drupal)@*@1 +CMS@drupal@drupal@*@/core/misc/drupalSettingsLoader.js@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@0 +CMS@drupal@drupal@*@/core/misc/drupal.js@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@0 +CMS@drupal@drupal@*@/core/misc/drupal.init.js@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@0 +CMS@drupal@drupal@*@/themes/bootstrap/js/drupal.bootstrap.js@*@*@0 +CMS@drupal@drupal@*@/drupal/@*@*@0 +CMS@joomla@joomla\!@*@/administrator/@*@*@1 +CMS@joomla@joomla\!@*@/joomla/@*@*@0 +CMS@joomla@joomla\!@*@/templates/joomla/@*@*@0 +CMS@sixapart@movabletype@*@/mt.cgi@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@sixapart@movabletype@*@/mt.cgi?__lang=ja@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@sixapart@movabletype@*@/mt/mt.cgi@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@sixapart@movabletype@*@/mt/mt.cgi?__lang=ja@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@sixapart@movabletype@*@/cgi-bin/mt/mt.cgi@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@1 
+CMS@sixapart@movabletype@*@/cgi-bin/mt/mt.cgi?__lang=ja@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@sixapart@movabletype@*@/mt-sp.cgi@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@sixapart@movabletype@*@/mt/mt-sp.cgi@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@tiki@tikiwiki_cms\/groupware@*@/tikiwiki/@*@*@0 +CMS@n-i-agroinformatics@soy_cms@*@/cms/admin/index.php@*@*@1 +CMS@n-i-agroinformatics@soy_cms@*@/soy/admin/index.php@*@*@1 +CMS@mediawiki@mediawiki@*@/index.php?Special:UserLogin@*@*@1 +CMS@mediawiki@mediawiki@*@/mediawiki/@*@*@0 +CMS@cmsimple@cmsimple@*@/?login@*@*@1 +CMS@cmsimple@cmsimple_classic@*@/?login@*@*@1 +CMS@cmsmadesimple@cms_made_simple@*@/?login@*@*@1 +CMS@modx@modx_revolution@*@/manager/@*@*@1 +CMS@nucleuscms@nucleus@*@/nucleus/@*@*@1 +CMS@nucleuscms@nucleus_cms@*@/nucleus/@*@*@1 +CMS@xoops@xoops@*@/user.php@*@*@1 +CMS@rcms@rcms@*@/management/login/login/@.*(\-rcms).*@*@1 +Framework@apache@tomcat@*@/tomcat-docs/@*@*@0 +Framework@apache@tomcat@*@/examples/servlets/@*@*@0 +Framework@apache@tomcat@*@/examples/jsp@*@*@0 +Framework@apache@tomcat@*@/examples/jsp/snp/snoop.jsp@*@*@0 +Framework@apache@tomcat@*@/examples/jsp/servlets/@*@*@0 +Framework@apache@tomcat@*@/webdav/@*@*@0 +Framework@apache@tomcat@*@/admin/@.*(Licensed to the Apache Software Foundation).*@*@0 +Framework@apache@tomcat@*@/config.aspx@*@.*(Apache Tomcat/([0-9]).*\- Error report).*@0 +Framework@apache@tomcat@*@/config.jsp@*@.*(Apache Tomcat/([0-9]).*\- Error report).*@0 +Framework@apache@tomcat@*@/config.php@*@.*(Apache Tomcat/([0-9]).*\- Error report).*@0 +Framework@apache@coyote_http_connector@*@/tomcat-docs/@*@*@0 +Framework@apache@coyote_http_connector@*@/examples/jsp@*@*@0 +Framework@apache@coyote_http_connector@*@/examples/servlets/@*@*@0 +Framework@apache@coyote_http_connector@*@/webdav/@*@*@0 +Framework@apache@coyote_http_connector@*@/admin/@*@*@0 +Framework@adobe@coldfusion@*@/CFIDE/@*@*@0 +Framework@adobe@coldfusion@*@/cfdocs/@*@*@0 
+Language@php@php@*@/?=PHPB8B5F2A0-3C92-11d3-A3A9-4C7B08C10000@*@*@0 +Language@php@php@*@/info.php@*@*@0 +Language@php@php@*@/phpinfo.php@*@*@0 +Language@php@php@*@/test.php@*@*@0 +Tool@squirrelmail@squirrelmail@*@/webmail/src/login.php@*@*@1 +Tool@squirrelmail@squirrelmail@*@/sqwebmail/@*@*@0 +Tool@squirrelmail@squirrelmail@*@/webmail/@*@*@0 +Tool@apache@subversion@*@/svn/@*@*@0 +Tool@apache@subversion@*@/.svn/entries@*@*@0 +Tool@apache@subversion@*@/repos/@*@*@0 +Tool@hping@visitors@*@/visitors/@*@*@0 +Tool@hping@visitors@*@/log/@*@*@0 +Tool@webdruid@webdruid@*@/webdruid/@*@*@0 +Tool@webdruid@webdruid@*@/usage/@*@*@0 +Tool@htdig@htdig@*@/htdig/@*@*@0 +Tool@htdig@htdig@*@/htsearch/@*@*@0 +Tool@gnu@mailman@*@/mailman/@*@.*Delivered by (Mailman.*[0-9])</td>.*@0 +Tool@gnu@mailman@*@/mailman/admin/@*@.*Delivered by (Mailman.*[0-9])</td>.*@0 +Tool@gnu@mailman@*@/mailman/admin/mailman@*@.*Delivered by (Mailman.*[0-9])</td>.*@0 +Tool@gnu@mailman@*@/mailman/create@*@.*Delivered by (Mailman.*[0-9])</td>.*@0 +Tool@phpmyadmin@phpmyadmin@*@/phpmyadmin/@*@.*<title>(phpMyAdmin.*[0-9]).*@1 +Tool@phpmyadmin@phpmyadmin@*@/phpmyadmin/index.php@*@.*(phpMyAdmin.*[0-9]).*@1 +Tool@phpmyadmin@phpmyadmin@*@/phpMyAdmin/@*@.*(phpMyAdmin.*[0-9]).*@1 +Tool@phpmyadmin@phpmyadmin@*@/phpMyAdmin/index.php@*@.*(phpMyAdmin.*[0-9]).*@1 +Tool@awstats@awstats@*@/webstat/@*@*@0 +Tool@mrtg@mrtgconfig@*@/mrtg/@*@*@0 +Tool@mrtg@multi_router_traffic_grapher_cgi@*@/mrtg/@*@*@0 +Tool@webmin@webmin@*@/webmin/@*@*@0 +Tool@phpbb@phpbb@*@/phpbb/@*@*@0 +Tool@dwr@dwr@*@/dwr/index.html@*@*@0 diff --git a/signatures/signature_error.txt b/signatures/signature_error.txt new file mode 100644 index 0000000..410943a --- /dev/null +++ b/signatures/signature_error.txt @@ -0,0 +1,21 @@ +Language@Java@*@([^0-9a-zA-z\.]java\.[0-9a-zA-Z\.])* +Language@Java@*@([^0-9a-zA-z\.]javax\.[0-9a-zA-Z\.])* +Language@Java@*@([^0-9a-zA-z\.]com\.[0-9a-zA-Z\.])* +Language@Java@*@([^0-9a-zA-z\.]jp\.[0-9a-zA-Z\.])* 
+Language@Java@*@([^0-9a-zA-z\.]org\.[0-9a-zA-Z\.])* +Language@PHP@*@(Fatal[^\n]*php) +Language@PHP@*@(Warning[^\n]*php) +DataBase@Oracle@*@(ORA-[0-9a-zA-Z\.])* +Other@*@*@(Exception[^\r\n<>])* +Other@*@*@(Error[^\r\n<>])* +Other@*@*@(Illegal[^\r\n<>])* +Other@*@*@(Incompatible[^\r\n<>])* +Other@*@*@(Incorrect[^\r\n<>])* +Other@*@*@(Invalid[^\r\n<>])* +Other@*@*@(Missing[^\r\n<>])* +Other@*@*@(Unknown[^\r\n<>])* +Other@*@*@(Expected '\)' in [^\r\n<>])* +Other@*@*@(System\.Web\.) +Other@*@*@(Error Number:\s+[0-9]+)\s*$ +Other@*@*@(fail|Fail|ERROR|error|Notice|Parse|Warning|Fatal)[^\n]*line[^\n]*[0-9]+ +Other@*@*@(fail|Fail|ERROR|error|Notice|Parse|Warning|Fatal|ERRSTR).{0,500}line[^\n]*[0-9]+ diff --git a/signatures/signature_page_type_from_url.txt b/signatures/signature_page_type_from_url.txt new file mode 100644 index 0000000..d9b18b2 --- /dev/null +++ b/signatures/signature_page_type_from_url.txt @@ -0,0 +1 @@ +Login@.*(login|log_in|logon|log_on|signin|sign_in).* diff --git a/signatures/signature_product.txt b/signatures/signature_product.txt new file mode 100644 index 0000000..f9b04e3 --- /dev/null +++ b/signatures/signature_product.txt @@ -0,0 +1,128 @@ +CMS@tiki@tikiwiki_cms\/groupware@*@(Powered by TikiWiki) +CMS@wordpress@wordpress@*@<.*=(.*/wp-).*/.*> +CMS@wordpress@wordpress@*@( +CMS@wordpress@wordpress@*@.*(Powered by WordPress) +CMS@wordpress@wordpress@*@.*(://.*/xmlrpc.php) +CMS@wordpress@wordpress@*@.*(WordPress ([0-9]+[\.0-9]*[\.0-9]*)).* +CMS@wordpress@wordpress@*@.*(WordPress/([0-9]+[\.0-9]*[\.0-9]*)).* +CMS@sixapart@movabletype@*@.*Movable Type.*(v=([0-9]+[\.0-9]*[\.0-9]*)).* +CMS@sixapart@movabletype@*@<.*/(mt-.*)/.*> +CMS@sixapart@movabletype@*@(<.*/mt/.*>) +CMS@sixapart@movabletype@*@( +CMS@sixapart@movabletype@*@( +CMS@sixapart@movabletype@*@
(Powered by.*Movable Type\s+([0-9]+[\.0-9]*[\.0-9]*)).*
+CMS@lockon@ec-cube@2@Set-Cookie:.*(ECSESSID=.*;) +CMS@lockon@ec-cube@3@Set-Cookie:.*(eccube=.*;) +CMS@lockon@ec-cube@*@(eccube\.js) +CMS@lockon@ec-cube@*@(eccube\.legacy\.js) +CMS@xoops@xoops@*@(xoopscube\.js) +CMS@xoops@xoops@*@(xoops\.js) +CMS@xoops@xoops@*@(xoops\.css) +CMS@xoops@xoops@*@( +CMS@drupal@drupal@*@.*(data-drupal-link-system-path) +CMS@drupal@drupal@*@.*(jQuery.extend\(Drupal.settings) +CMS@drupal@drupal@*@( +CMS@drupal@drupal@*@( +CMS@drupal@drupal@*@( +CMS@drupal@drupal@*@ +CMS@drupal@drupal@*@ +CMS@drupal@drupal@*@ +CMS@drupal@drupal@*@ +CMS@drupal@drupal@*@.*(X-Drupal-.*Cache: MISS).* +CMS@drupal@drupal@*@.*(X-Drupal-Dynamic-.*Cache: MISS).* +CMS@drupal@drupal@7.0@.*(X-Generator: Drupal 7).* +CMS@drupal@drupal@8.0@.*(X-Generator: Drupal 8).* +CMS@joomla@joomla\!@*@ +CMS@joomla@joomla\!@*@ +CMS@joomla@joomla\!@*@Set-Cookie:.*(jfcookie=.*;) +CMS@joomla@joomla\!@*@(X-Content-Encoded-By:\s+Joomla!\s+([0-9]+[\.0-9]*[\.0-9]*)).* +CMS@typo3@typo3@*@.*(This website is powered by TYPO3).*> +CMS@typo3@typo3@*@.*(href="fileadmin/templates/).*> +CMS@typo3@typo3@*@( +CMS@typo3@typo3@*@(Set-Cookie: .*typo.*=[a-z0-9]{32}); +CMS@typo3@typo3@*@.*(TYPO[0-9]).* +CMS@site_publis@site_publis@*@(X-PUBLIS-Status:\s+REDIRECT\sTO\sPublisRedirectUri) +CMS@oscommerce@oscommerce@*@(Set-Cookie: osCsid=.*); +Framework@apache@tomcat@*@.*(Tomcat/([0-9]+[\.0-9]*[\.0-9]*)) +Framework@apache@tomcat@*@.*(Apache Tomcat User Guide) +Framework@apache@tomcat@*@.*(Tomcat\s([0-9])\sServlet/JSP container) +Framework@apache@tomcat@*@.*(Tomcat\sServlet/JSP container) +Framework@apache@tomcat@*@.*(JSP Samples) +Framework@apache@tomcat@*@.*(Servlet Examples with Code) +Framework@apache@tomcat@*@.*(Apache Tomcat User Guide) +Framework@apache@tomcat@*@.*(KNOWN ISSUES IN THIS RELEASE) +Framework@apache@tomcat@*@.*(Tomcat's administration web) +Framework@apache@coyote_http_connector@*@.*(Tomcat/([0-9]+[\.0-9]*[\.0-9]*)) +Framework@apache@coyote_http_connector@*@.*(Apache Tomcat User Guide) 
+Framework@apache@coyote_http_connector@*@.*(Tomcat\s([0-9])\sServlet/JSP container) +Framework@apache@coyote_http_connector@*@.*(Tomcat\sServlet/JSP container) +Framework@apache@coyote_http_connector@*@.*(JSP Samples) +Framework@apache@coyote_http_connector@*@.*(Servlet Examples with Code) +Framework@apache@coyote_http_connector@*@.*(Apache Tomcat User Guide) +Framework@apache@coyote_http_connector@*@.*(KNOWN ISSUES IN THIS RELEASE) +Framework@apache@coyote_http_connector@*@.*(Tomcat's administration web) +Framework@apache@struts@*@.*(org\.apache\.struts\.taglib\.html\.TOKEN) +Framework@adobe@coldfusion@*@Set-Cookie.*(CFID=.*;) +Framework@adobe@coldfusion@*@Set-Cookie.*(CFTOKEN=.*;) +Framework@adobe@coldfusion@*@.*(ColdFusion Administrator) +Framework@adobe@coldfusion@*@.*(ColdFusion Documentation) +Framework@adobe@coldfusion@*@.*(what_you_can_do_in_coldfusion_administrator) +Framework@oracle@webLogic@*@.*(WebLogic/([0-9]+[\.0-9]*[\.0-9]*)) +Framework@jboss@jboss@*@X-Powered-By:.*(JBoss-([0-9]+[\.0-9]*[\.0-9]*)) +OS@redhat@enterprise_linux@*@Server:.*(\(Red Hat\)) +OS@redhat@enterprise_linux@*@Server:.*(\(Red Hat Enterprise linux\)) +OS@ubuntu@ubuntu_linux@*@Server:.*(\(Ubuntu\)) +OS@centos@centos@*@Server:.*(\(CentOS\)) +OS@unix@unix@*@Server:.*(\(unix\)) +OS@microsoft@windows@*@Server:.*(iis/1\.0) +OS@microsoft@windows@*@Server:.*(iis/2\.0) +OS@microsoft@windows@*@Server:.*(iis/3\.0) +OS@microsoft@windows@*@Server:.*(iis/4\.0) +OS@microsoft@windows@*@Server:.*(iis/5\.0) +OS@microsoft@windows@*@Server:.*(iis/5\.1) +OS@microsoft@windows@*@Server:.*(iis/6\.0) +OS@microsoft@windows@*@Server:.*(iis/7\.0) +OS@microsoft@windows@*@Server:.*(iis/7\.5) +OS@microsoft@windows@*@Server:.*(iis/8\.0) +OS@microsoft@windows@*@Server:.*(iis/8\.5) +OS@microsoft@windows@*@Server:.*(\(win64\)) +OS@microsoft@windows@*@Server:.*(\(win32\)) +Web@apache@http_server@*@Server:.*(Apache/([0-9]+[\.0-9]*[\.0-9]*)) +Web@apache@http_server@*@Server:.*(Apache)[\s\r\n] 
+Web@apache@http_server@*@.*(Apache/([0-9]+[\.0-9]*[\.0-9]*)) +Web@apache@http_server@*@.*(Test Page for Apache) +Web@apache@http_server@*@.*(Apache HTTP Server[^D]*Documentation Project) +Web@apache@http_server@*@.*(Apache Status) +Web@apache@http_server@*@.*(Multi Language Custom Error Documents) +Web@apache@http_server@*@.*(This file is generated from xml source) +Web@nginx@nginx@*@Server:.*(nginx/([0-9]+[\.0-9]*[\.0-9]*)) +Web@nginx@nginx@*@Server:.*(nginx) +Web@nginx@nginx@*@.*(nginx/([0-9]+[\.0-9]*[\.0-9]*)) +Web@microsoft@internet_information_server@*@Server:.*(Microsoft-IIS/([0-9]+\.[0-9]+)) +Web@microsoft@internet_information_server@*@Server:.*(IIS/([0-9]+\.[0-9]+)) +Web@microsoft@internet_information_server@*@Server:.*(IIS) +Web@microsoft@internet_information_server@*@.*(It works).* +Web@f5@big-ip@*@Server:.*(bigip) +Language@php@php@*@Server:.*(PHP/([0-9]+[\.0-9]*[\.0-9]*)) +Language@php@php@*@X-Powered-By:.*(PHP/([0-9]+[\.0-9]*[\.0-9]*)) +Language@php@php@*@Set-Cookie:.*(PHPSESSID=.*;) +Language@php@php@*@.*(PHP Credits) +Language@php@php@*@.*(phpinfo\(\)) +Language@php@php@*@.*(<title>phpinfo\(\)) +Language@microsoft@asp.net@*@.*(X-AspNet-Version:.*([0-9]+[\.0-9]*[\.0-9]*)) +Language@microsoft@asp.net@*@.*(X-AspNetMvc-Version:.*([0-9]+[\.0-9]*[\.0-9]*)) +Language@microsoft@asp.net@*@Set-Cookie:.*(ASP\.NET_SessionId=.*;) +Language@microsoft@asp.net@*@.*(X-Powered-By:\s+ASP\.NET) +Tool@phpmyadmin@phpmyadmin@*@.*(phpMyAdmin ([0-9]+[\.0-9]*[\.0-9]*)) +Tool@phpmyadmin@phpmyadmin@*@.*(phpMyAdmin).* +Tool@phpmyadmin@phpmyadmin@*@.*<title>(phpMyAdmin setup).* +Tool@phpmyadmin@phpmyadmin@*@.*<title>(phpMyAdmin.*[0-9]).* +Tool@phpmyadmin@phpmyadmin@*@.*(phpmyadmin.svn.sourceforge.net/svnroot/phpmyadmin/trunk/phpMyAdmin/ChangeLog).* +Tool@phpmyadmin@phpmyadmin@*@.*(phpMyAdmin - ChangeLog).* +Tool@squirrelmail@squirrelmail@*@.*(SquirrelMail version ([0-9]+)) +Tool@openssl@openssl@*@Server:.*(OpenSSL/([0-9]+\.[0-9]+\.[0-9][a-z])) 
+Tool@openssl@openssl@*@Server:.*(OpenSSL/([0-9]+\.[0-9]+\.[0-9])) +Tool@mod_ssl@mod_ssl@*@Server:.*(mod_ssl/([0-9]+\.[0-9]+\.[0-9]+)) +Tool@gnu@mailman@*@.*Delivered by (Mailman.*[0-9]).* +Tool@awstats@awstats@*@.*(AWStats for domain).* +WAF@bitforest@scutum@*@Server:.*(Scutum) diff --git a/signatures/signature_search_query.txt b/signatures/signature_search_query.txt new file mode 100644 index 0000000..2c4d23e --- /dev/null +++ b/signatures/signature_search_query.txt @@ -0,0 +1,164 @@ +Web@apache@http_server@*@inurl:/server-status@*@Version:.*(Apache/([0-9]+[\.0-9]*[\.0-9]*))@0 +Web@apache@http_server@*@inurl:/error/README@*@*@0 +Web@apache@http_server@*@inurl:/icons/@*@*@0 +Web@apache@http_server@*@inurl:/icons/README@*@*@0 +Web@apache@http_server@*@inurl:/icons/small/README.txt@(These icons are provided as)@*@0 +Web@apache@http_server@*@inurl:/manual/@(Apache)@*@0 +Web@apache@http_server@*@inurl:/manual/images/@(<title>Apache)@*@0 +Web@apache@http_server@*@inurl:/manual/style/@(<title>Apache)@*@0 +Web@microsoft@internet_information_server@*@filetype:webinfo@*@*@0 +Web@microsoft@internet_information_server@*@filetype:exe@*@*@0 +Web@microsoft@internet_information_server@*@filetype:htr@*@*@0 +Web@microsoft@internet_information_server@*@filetype:idc@*@*@0 +Web@microsoft@internet_information_server@*@filetype:htw@*@*@0 +Web@microsoft@internet_information_server@*@filetype:ida@*@*@0 +Web@microsoft@internet_information_server@*@filetype:idq@*@*@0 +CMS@wordpress@wordpress@*@inurl:/wp-login.php@*@(ver=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@wordpress@wordpress@*@inurl:/wp/wp-login.php@*@(ver=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@wordpress@wordpress@*@inurl:/wp-admin/@*@(ver=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@wordpress@wordpress@*@inurl:/wp/wp-admin@*@(ver=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@wordpress@wordpress@*@inurl:/wp-content/@*@(ver=([0-9]+[\.0-9]*[\.0-9]*))@0 +CMS@wordpress@wordpress@*@inurl:/wp-includes/@*@(ver=([0-9]+[\.0-9]*[\.0-9]*))@0 
+CMS@wordpress@wordpress@*@inurl:/wp-json/@*@(ver=([0-9]+[\.0-9]*[\.0-9]*))@0 +CMS@wordpress@wordpress@*@inurl:/wp-json/wp/v2/users@*@(ver=([0-9]+[\.0-9]*[\.0-9]*))@0 +CMS@wordpress@wordpress@*@inurl:/xmlrpc.php@(XML-RPC server accepts POST requests only)*@*@0 +CMS@wordpress@wordpress@*@inurl:/wp-admin/install.php@*@(ver=([0-9]+[\.0-9]*[\.0-9]*))@0 +CMS@wordpress@wordpress@*@inurl:/wp-admin/upgrade.php@*@(ver=([0-9]+[\.0-9]*[\.0-9]*))@0 +CMS@wordpress@wordpress@*@inurl:/?author=1@.*(/author/).*.@(ver=([0-9]+[\.0-9]*[\.0-9]*))@0 +CMS@drupal@drupal@*@inurl:/user/login@(/drupal)@*@1 +CMS@drupal@drupal@*@inurl:/core/misc/drupalSettingsLoader.js@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@0 +CMS@drupal@drupal@*@inurl:/core/misc/drupal.js@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@0 +CMS@drupal@drupal@*@inurl:/core/misc/drupal.init.js@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@0 +CMS@drupal@drupal@*@inurl:/themes/bootstrap/js/drupal.bootstrap.js@*@*@0 +CMS@drupal@drupal@*@inurl:/drupal/@*@*@0 +CMS@joomla@joomla\!@*@inurl:/joomla/@*@*@0 +CMS@joomla@joomla\!@*@inurl:/templates/joomla/@*@*@0 +CMS@sixapart@movabletype@*@inurl:/mt.cgi@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@sixapart@movabletype@*@inurl:/mt.cgi?__lang=ja@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@sixapart@movabletype@*@inurl:/mt/mt.cgi@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@sixapart@movabletype@*@inurl:/mt/mt.cgi?__lang=ja@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@sixapart@movabletype@*@inurl:/cgi-bin/mt/mt.cgi@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@sixapart@movabletype@*@inurl:/cgi-bin/mt/mt.cgi?__lang=ja@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@sixapart@movabletype@*@inurl:/mt-sp.cgi@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@sixapart@movabletype@*@inurl:/mt/mt-sp.cgi@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@sixapart@movabletype@*@inurl:/cgi-bin/mt/mt-sp.cgi@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@1 +CMS@sixapart@movabletype@*@inurl:/cgi-bin/mt/mt.psgi@*@(v=([0-9]+[\.0-9]*[\.0-9]*))@0 +CMS@tiki@tikiwiki_cms\/groupware@*@inurl:/tikiwiki/@*@*@0 
+CMS@n-i-agroinformatics@soy_cms@*@inurl:/cms/admin/index.php@*@*@1 +CMS@n-i-agroinformatics@soy_cms@*@inurl:/soy/admin/index.php@*@*@1 +CMS@mediawiki@mediawiki@*@inurl:/index.php?Special:UserLogin@*@*@1 +CMS@mediawiki@mediawiki@*@inurl:/mediawiki/@*@*@0 +CMS@cmsimple@cmsimple@*@inurl:/?login@*@*@1 +CMS@cmsimple@cmsimple_classic@*@inurl:/?login@*@*@1 +CMS@cmsmadesimple@cmsmadesimple@*@inurl:/?login@*@*@1 +CMS@modx@modx_revolution@*@inurl:/manager/@*@*@1 +CMS@nucleuscms@nucleus@*@inurl:/nucleus/@*@*@1 +CMS@nucleuscms@nucleus_cms@*@inurl:/nucleus/@*@*@1 +CMS@xoops@xoops@*@inurl:/user.php@*@*@1 +CMS@xoops@xoops@*@inurl:/xoops/@*@*@0 +CMS@rcms@rcms@*@inurl:/management/login/login/@.*(\-rcms).*@*@1 +Framework@apache@tomcat@*@inurl:/tomcat-docs/@*@*@0 +Framework@apache@tomcat@*@inurl:/examples/servlets/@*@*@0 +Framework@apache@tomcat@*@inurl:/examples/jsp@*@*@0 +Framework@apache@tomcat@*@inurl:/examples/jsp/snp/snoop.jsp@*@*@0 +Framework@apache@tomcat@*@inurl:/examples/jsp/servlets/@*@*@0 +Framework@apache@tomcat@*@inurl:/webdav/@*@*@0 +Framework@apache@tomcat@*@inurl:/admin/@.*(Licensed to the Apache Software Foundation).*@*@0 +Framework@apache@tomcat@*@inurl:/config.aspx@*@.*(Apache Tomcat/([0-9]).*\- Error report).*@0 +Framework@apache@tomcat@*@inurl:/config.jsp@*@.*(Apache Tomcat/([0-9]).*\- Error report).*@0 +Framework@apache@tomcat@*@inurl:/config.php@*@.*(Apache Tomcat/([0-9]).*\- Error report).*@0 +Framework@apache@coyote_http_connector@*@inurl:/tomcat-docs/@*@*@0 +Framework@apache@coyote_http_connector@*@inurl:/examples/jsp@*@*@0 +Framework@apache@coyote_http_connector@*@inurl:/examples/servlets/@*@*@0 +Framework@apache@coyote_http_connector@*@inurl:/webdav/@*@*@0 +Framework@apache@coyote_http_connector@*@inurl:/admin/@*@*@0 +Framework@adobe@coldfusion@*@inurl:/CFIDE/@*@*@0 +Framework@adobe@coldfusion@*@inurl:/cfdocs/@*@*@0 +Framework@adobe@coldfusion@*@filetype:cfm@*@*@0 +Language@php@php@*@inurl:/?=PHPB8B5F2A0-3C92-11d3-A3A9-4C7B08C10000@*@*@0 
+Language@php@php@*@inurl:/info.php@*@*@0 +Language@php@php@*@inurl:/phpinfo.php@*@*@0 +Language@php@php@*@inurl:/test.php@*@*@0 +Language@php@php@*@filetype:php@*@*@0 +Language@*@Java@*@filetype:jsp@*@*@0 +Language@*@Java@*@filetype:do@*@*@0 +Language@*@Java@*@filetype:action@*@*@0 +Language@microsoft@asp.net@*@filetype:aspx@*@*@0 +Language@microsoft@asp.net@*@filetype:asp@*@*@0 +Language@microsoft@asp.net@*@filetype:asa@*@*@0 +Language@microsoft@asp.net@*@filetype:asax@*@*@0 +Language@adobe@flash_player@*@filetype:swf@*@*@0 +Language@adobe@flash_player@*@filetype:fla@*@*@0 +Language@adobe@flash_player@*@filetype:as@*@*@0 +Language@adobe@flash_player@*@filetype:clr@*@*@0 +Language@adobe@flash_player@*@filetype:act@*@*@0 +Language@perl@perl@*@filetype:pl@*@*@0 +Language@perl@perl@*@filetype:psgi@*@*@0 +Tool@squirrelmail@squirrelmail@*@inurl:/webmail/src/login.php@*@*@1 +Tool@squirrelmail@squirrelmail@*@inurl:/sqwebmail/@*@*@0 +Tool@squirrelmail@squirrelmail@*@inurl:/webmail/@*@*@0 +Tool@apache@subversion@*@inurl:/svn/@*@*@0 +Tool@apache@subversion@*@inurl:/.svn/entries@*@*@0 +Tool@apache@subversion@*@inurl:/repos/@*@*@0 +Tool@hping@visitors@*@inurl:/visitors/@*@*@0 +Tool@hping@visitors@*@inurl:/log/@*@*@0 +Tool@webdruid@webdruid@*@inurl:/webdruid/@*@*@0 +Tool@webdruid@webdruid@*@inurl:/usage/@*@*@0 +Tool@htdig@htdig@*@inurl:/htdig/@*@*@0 +Tool@htdig@htdig@*@inurl:/htsearch/@*@*@0 +Tool@gnu@mailman@*@inurl:/mailman/@*@.*Delivered by (Mailman.*[0-9])</td>.*@0 +Tool@gnu@mailman@*@inurl:/mailman/admin/@*@.*Delivered by (Mailman.*[0-9])</td>.*@0 +Tool@gnu@mailman@*@inurl:/mailman/admin/mailman@*@.*Delivered by (Mailman.*[0-9])</td>.*@0 +Tool@gnu@mailman@*@inurl:/mailman/create@*@.*Delivered by (Mailman.*[0-9])</td>.*@0 +Tool@phpmyadmin@phpmyadmin@*@inurl:/phpmyadmin/@*@.*<title>(phpMyAdmin.*[0-9]).*@1 +Tool@phpmyadmin@phpmyadmin@*@inurl:/phpmyadmin/index.php@*@.*(phpMyAdmin.*[0-9]).*@1 +Tool@phpmyadmin@phpmyadmin@*@inurl:/phpMyAdmin/@*@.*(phpMyAdmin.*[0-9]).*@1 
+Tool@phpmyadmin@phpmyadmin@*@inurl:/phpMyAdmin/index.php@*@.*(phpMyAdmin.*[0-9]).*@1 +Tool@awstats@awstats@*@inurl:/webstat/@*@*@0 +Tool@mrtg@mrtgconfig@*@inurl:/mrtg/@*@*@0 +Tool@mrtg@multi_router_traffic_grapher_cgi@*@inurl:/mrtg/@*@*@0 +Tool@webmin@webmin@*@inurl:/webmin/@*@*@0 +Tool@phpbb@phpbb@*@inurl:/phpbb/@*@*@0 +Tool@dwr@dwr@*@inurl:/dwr/index.html@*@*@0 +*@*@*@*@intitle:"index of"@*@*@0 +*@*@*@*@filetype:bak@*@*@0 +*@*@*@*@filetype:bk@*@*@0 +*@*@*@*@filetype:backup@*@*@0 +*@*@*@*@filetype:old@*@*@0 +*@*@*@*@filetype:org@*@*@0 +*@*@*@*@filetype:csv@*@*@0 +*@*@*@*@filetype:tsv@*@*@0 +*@*@*@*@filetype:sys@*@*@0 +*@*@*@*@filetype:mdb@*@*@0 +*@*@*@*@filetype:dat@*@*@0 +*@*@*@*@filetype:doc@*@*@0 +*@*@*@*@filetype:docx@*@*@0 +*@*@*@*@filetype:xls@*@*@0 +*@*@*@*@filetype:xlsx@*@*@0 +*@*@*@*@filetype:txt@*@*@0 +*@*@*@*@filetype:xml@*@*@0 +*@*@*@*@filetype:conf@*@*@0 +*@*@*@*@filetype:config@*@*@0 +*@*@*@*@filetype:ini@*@*@0 +*@*@*@*@filetype:inf@*@*@0 +*@*@*@*@filetype:tmp@*@*@0 +*@*@*@*@filetype:temp@*@*@0 +*@*@*@*@filetype:dmp@*@*@0 +*@*@*@*@filetype:dump@*@*@0 +*@*@*@*@filetype:sql@*@*@0 +*@*@*@*@filetype:db@*@*@0 +*@*@*@*@filetype:log@*@*@0 +*@*@*@*@filetype:sh@*@*@0 +*@*@*@*@filetype:bat@*@*@0 +*@*@*@*@filetype:cmd@*@*@0 +*@*@*@*@filetype:exe@*@*@0 +*@*@*@*@filetype:bin@*@*@0 +*@*@*@*@filetype:dll@*@*@0 +*@*@*@*@filetype:zip@*@*@0 +*@*@*@*@filetype:lzh@*@*@0 +*@*@*@*@filetype:gzip@*@*@0 +*@*@*@*@filetype:rar@*@*@0 +*@*@*@*@filetype:tar@*@*@0 +*@*@*@*@filetype:tgz@*@*@0 +*@*@*@*@filetype:Z@*@*@0 +*@*@*@*@filetype:7z@*@*@0 diff --git a/util.py b/util.py index 43d79fe..45083ec 100644 --- a/util.py +++ b/util.py @@ -4,12 +4,12 @@ import sys import string import random -import codecs -import json +import urllib3 +import socket +import ipaddress import configparser -from urllib3 import util from datetime import datetime -from subprocess import Popen +from logging import getLogger, FileHandler, StreamHandler, Formatter # Printing colors. 
OK_BLUE = '\033[94m' # [*] @@ -36,22 +36,27 @@ def __init__(self): # Read config.ini. full_path = os.path.dirname(os.path.abspath(__file__)) config = configparser.ConfigParser() + config.read(os.path.join(full_path, 'config.ini')) + try: - config.read(os.path.join(full_path, './classifier4gyoithon/config.ini')) - except FileExistsError as err: - self.print_message(FAIL, 'File exists error: {}'.format(err)) + self.banner_delay = float(config['Common']['banner_delay']) + self.report_date_format = config['Common']['date_format'] + self.con_timeout = float(config['Common']['con_timeout']) + self.log_dir = config['Common']['log_path'] + self.log_file = config['Common']['log_file'] + self.log_path = os.path.join(os.path.join(full_path, self.log_dir), self.log_file) + self.modules_dir = config['Common']['module_path'] + except Exception as e: + self.print_message(FAIL, 'Reading config.ini is failure : {}'.format(e)) sys.exit(1) - # Utility setting value. - self.http_timeout = float(config['Utility']['http_timeout']) - - # Spider setting value. - self.output_base_path = config['Spider']['output_base_path'] - self.store_path = os.path.join(full_path, self.output_base_path) - if os.path.exists(self.store_path) is False: - os.mkdir(self.store_path) - self.output_filename = config['Spider']['output_filename'] - self.spider_delay_time = config['Spider']['delay_time'] + # Setting logger. + self.logger = getLogger('GyoiThon') + self.logger.setLevel(20) + file_handler = FileHandler(self.log_path) + self.logger.addHandler(file_handler) + formatter = Formatter('%(levelname)s,%(message)s') + file_handler.setFormatter(formatter) # Print metasploit's symbol. def print_message(self, type, message): @@ -85,6 +90,10 @@ def print_exception(self, e, message): self.print_message(WARNING, '{}'.format(e)) self.print_message(WARNING, message) + # Write logs. + def write_log(self, loglevel, message): + self.logger.log(loglevel, self.get_current_date() + ' ' + message) + # Create random string. 
# Transform date from string to object.
def transform_date_object(self, target_date, format=None):
    """Parse *target_date* into a ``datetime`` object.

    Uses the explicit *format* when one is supplied; otherwise falls back
    to the report date format configured on this instance.
    """
    fmt = self.report_date_format if format is None else format
    return datetime.strptime(target_date, fmt)
# Check IP address format.
def is_valid_ip(self, arg):
    """Return True when *arg* is a syntactically valid IPv4/IPv6 address."""
    try:
        ipaddress.ip_address(arg)
        return True
    except ValueError:
        return False

# Check argument values.
def check_arg_value(self, protocol, fqdn, port, path):
    """Validate one target entry (protocol, host, port number, path).

    Returns True when every field is acceptable, False otherwise.
    Prints a FAIL message for the first invalid field found.
    """
    # Check protocol.
    if protocol not in ['http', 'https']:
        self.print_message(FAIL, 'Invalid protocol : {}'.format(protocol))
        # BUG FIX: the original fell through here without returning, so an
        # unsupported protocol with otherwise-valid arguments returned True.
        return False

    # Check FQDN / IP address (type check only; format is validated elsewhere).
    if isinstance(fqdn, str) is False and isinstance(fqdn, int) is False:
        self.print_message(FAIL, 'Invalid IP address : {}'.format(fqdn))
        return False

    # Check port number: numeric string within 1-65535.
    if port.isdigit() is False:
        self.print_message(FAIL, 'Invalid port number : {}'.format(port))
        return False
    elif (int(port) < 1) or (int(port) > 65535):
        self.print_message(FAIL, 'Invalid port number : {}'.format(port))
        return False

    # Check path: must be a string that starts and ends with '/'.
    if isinstance(path, str) is False and isinstance(path, int) is False:
        self.print_message(FAIL, 'Invalid path : {}'.format(path))
        return False
    elif path.startswith('/') is False or path.endswith('/') is False:
        self.print_message(FAIL, 'Invalid path : {}'.format(path))
        return False

    return True
# Send http request.
def send_request(self, method, target_url):
    """Issue one HTTP request and collect the raw response pieces.

    Returns a 4-tuple:
        res           : urllib3 HTTPResponse (None when the request failed).
        server_header : the 'Server: value' header line, '-' when absent.
        res_header    : all response headers joined as 'Name: value' CRLF lines.
        res_body      : '\r\n\r\n' + decoded body text ('' on failure).
    Failures are reported via print_exception and logged at WARNING (30).
    """
    res_header = ''
    res_body = ''
    server_header = '-'
    res = None
    http = urllib3.PoolManager(timeout=self.con_timeout)
    try:
        res = http.request(method, target_url)
        for header in res.headers.items():
            res_header += header[0] + ': ' + header[1] + '\r\n'
            if header[0].lower() == 'server':
                server_header = header[0] + ': ' + header[1]
        # BUG FIX: a non-UTF-8 body used to raise UnicodeDecodeError here,
        # taking the except path and discarding the whole body. Decode
        # leniently so binary/mis-encoded responses are still analysable.
        res_body = '\r\n\r\n' + res.data.decode('utf-8', errors='replace')
    except Exception as e:
        self.print_exception(e, 'Access is failure : {}'.format(target_url))
        self.write_log(30, 'Accessing is failure : {}'.format(target_url))
    return res, server_header, res_header, res_body

# Forward lookup: FQDN -> IPv4 address string, or 'unknown' on failure.
def forward_lookup(self, fqdn):
    try:
        return socket.gethostbyname(fqdn)
    except Exception as e:
        self.print_exception(e, 'Forward lookup error: {}'.format(fqdn))
        return 'unknown'

# Reverse lookup: IP -> (hostname, aliaslist, ipaddrlist).
# NOTE(review): returns a 3-tuple on success but the plain string
# 'unknown' on failure — callers must handle both shapes; confirm before
# unifying the return type.
def reverse_lookup(self, ip_addr):
    try:
        return socket.gethostbyaddr(ip_addr)
    except Exception as e:
        self.print_exception(e, 'Reverse lookup error: {}'.format(ip_addr))
        return 'unknown'