Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
42 changed files
with
30,327 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1 +1,18 @@ | ||
# Hachi: An Intelligent threat mapper | ||
|
||
|
||
The ATT&CK framework has become a benchmark in the security domain. ATT&CK provides data about each technique used across different attack stages. Hachi was created to contribute to the ATT&CK community. Hachi is based on the radare2 framework and uses the data provided by ATT&CK to map the symptoms of malware onto the ATT&CK matrix.
|
||
Following modules of Hachi make this tool a great addition to an analyst’s or company’s armaments: | ||
|
||
• Threat Intel: Hachi provides threat-intelligence data, such as the possible parent campaign or author of a malware file.
• Malware behavior: It uncovers core malware behaviors using automated static analysis coupled with symbolic execution to explore multiple execution paths, and maps them onto the ATT&CK matrix.
• RESTful API: Hachi provides a RESTful API, which enables the tool to integrate seamlessly with malware-processing frameworks.
• Visualization: It allows for the creation of detailed visual reports.
|
||
|
||
References: | ||
https://attack.mitre.org/ | ||
https://www.radare.org/get/THC2018.pdf | ||
https://github.com/pinkflawd/r2graphity | ||
https://github.com/Yara-Rules/rules |
Empty file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,157 @@ | ||
import os | ||
import web | ||
import uuid | ||
import json | ||
import hashlib | ||
import pythoncom | ||
import win32com.client | ||
from utils import db_comm | ||
from utils.config import Config | ||
from utils.mitre_table import table_creation | ||
|
||
# web.py routing table: URL path regex -> handler class name (resolved
# from globals() in the __main__ block below).
urls = ('/', 'Upload',
        '/report/download/(.+)', 'Images',
        '/report/images/(.+)', 'Images',
        '/report/(.+)', 'Reporting',
        '/images/(.*)', 'Images'
        )

# Global configuration, loaded once at import time.
opts = Config().read_config()
# Initialise COM for this thread and bind to the private MSMQ queue that the
# analysis worker consumes from.
pythoncom.CoInitialize()
qinfo = win32com.client.Dispatch("MSMQ.MSMQQueueInfo")
computer_name = os.getenv('COMPUTERNAME')  # NOTE(review): None off-Windows — this app is Windows-only (MSMQ/win32com)
queue_name = opts["config"]["QUEUE_NAME"]
qinfo.FormatName = "direct=os:" + computer_name + "\\PRIVATE$\\" + queue_name
|
||
|
||
class Reporting:
    """Renders the HTML analysis report for a previously submitted sample.

    The report is assembled from the per-sample JSON artifacts that the
    analysis worker writes under ``OUTPUT_DIR/<uid>/``.
    """

    def __init__(self):
        pass

    def _load_json(self, path):
        """Return the parsed JSON at *path*, or None if the file is absent."""
        if os.path.exists(path):
            with open(path, 'rb') as fp:
                return json.load(fp)
        return None

    def GET(self, uid):
        """Render the report page for sample *uid*; missing artifacts are skipped."""
        render = web.template.frender('templates/reporting.html')
        filename = db_comm.get_column_val('uid', uid, 'filepath')
        anomalies = []
        table_data = {}
        report_path = os.path.join(opts["config"]["OUTPUT_DIR"], uid)

        campaign_info = self._load_json(os.path.join(report_path, uid + '.campaign.json')) or {}
        file_info = self._load_json(os.path.join(report_path, uid + '.basic_info.json')) or {}
        static_info = self._load_json(os.path.join(report_path, uid + '.static.json')) or {}
        cert_info = self._load_json(os.path.join(report_path, uid + '.cert.json')) or {}

        # Collect human-readable descriptions of every matched yara rule.
        suspicious = self._load_json(os.path.join(report_path, uid + '.yara.json')) or {}
        for tag in suspicious.get("Yara Matched", {}):
            for rule_name in suspicious["Yara Matched"][tag]:
                rule_data = suspicious["Yara Matched"][tag][rule_name]
                if "description" in rule_data:
                    anomalies.append(rule_data["description"])

        behav_json = self._load_json(os.path.join(report_path, uid + '.behav.json')) or {}
        suspicious_api_seq = list(behav_json.get("Suspicious Behaviors", {}).keys())

        # Bug fix: mitre_json was previously left unbound when utils/mitre.json
        # was missing, raising NameError in table_creation() below; default to
        # an empty mapping. Path also built portably instead of 'utils\mitre.json'.
        mitre_json = self._load_json(os.path.join('utils', 'mitre.json')) or {}
        sig_json = self._load_json(os.path.join(report_path, uid + '.mitre.json'))
        if sig_json is not None:
            table_data = table_creation(sig_json, mitre_json)

        # Use the generated call-graph PNG when present, else the logo.
        if os.path.exists(os.path.join(report_path, uid + '.png')):
            png_name = uid + '.png'
        else:
            png_name = 'Hachi-Logo.png'
        return render(uid, filename, file_info, campaign_info, table_data,
                      static_info, cert_info, anomalies, suspicious_api_seq,
                      png_name)
|
||
|
||
class Upload:
    """Landing page: shows submission statistics (GET) and accepts new samples (POST)."""

    def __init__(self):
        pass

    def GET(self):
        """Render the dashboard with the sample table and status counters."""
        render = web.template.frender('templates/hachi.html')
        row = db_comm.get_data()
        sample_count = db_comm.count('uid')
        pending_count = db_comm.count_condition('uid', 'STATUS', 'PENDING')
        complete_count = db_comm.count_condition('uid', 'STATUS', 'COMPLETED')
        fail_count = db_comm.count_condition('uid', 'STATUS', 'FAILED')
        status_count = [complete_count, pending_count, fail_count]
        return render(row, sample_count, status_count)

    def POST(self):
        """Store an uploaded sample, record it as PENDING and enqueue it for analysis."""
        x = web.input(myfile={})
        filename = x['myfile'].filename
        if filename != "" and filename is not None:
            uid = uuid.uuid4()
            folderpath = os.path.join(opts["config"]["INPUT_DIR"], str(uid))
            os.mkdir(folderpath)
            out_folderpath = os.path.join(opts["config"]["OUTPUT_DIR"], str(uid))
            os.mkdir(out_folderpath)
            # Read the upload once; previously the sample was written to disk
            # and then re-opened and re-read just to compute the hash.
            contents = x['myfile'].file.read()
            with open(os.path.join(folderpath, str(uid)), 'wb') as fp:
                fp.write(contents)
            sha2 = hashlib.sha256(contents).hexdigest()
            # Bug fix: insert the DB row BEFORE enqueueing. The worker updates
            # this row when it finishes, so enqueueing first raced against the
            # insert and could update a row that did not exist yet.
            db_comm.insert(str(uid), sha2, filename, "PENDING")

            queue = qinfo.Open(2, 0)  # MSMQ access mode 2 = send
            msg = win32com.client.Dispatch("MSMQ.MSMQMessage")
            msg.Label = "TestMsg"
            msg.Body = str(uid)
            msg.Send(queue)
            queue.Close()
        raise web.seeother('/')
|
||
|
||
class Images:
    """Serves static images and generated report artifacts (PNG graphs, report zips)."""

    # File extension -> Content-Type header value.
    CONTENT_TYPES = {
        "png": "image/png",
        "jpg": "image/jpeg",
        "gif": "image/gif",
        "ico": "image/x-icon",
        "zip": "application/octet-stream",
    }

    def __init__(self):
        pass

    def GET(self, name):
        """Return the raw bytes of *name* with an appropriate Content-Type."""
        ext = name.split(".")[-1]
        # Bug fix: an unknown extension used to raise KeyError (HTTP 500);
        # fall back to a generic binary content type instead.
        ctype = self.CONTENT_TYPES.get(ext, "application/octet-stream")

        # Only serve names that actually exist in the images directory
        # (exact-name membership check blocks path traversal).
        if name in os.listdir('images'):
            web.header("Content-Type", ctype)
            with open('images/%s' % name, "rb") as fp:  # 'rb': binary payloads
                return fp.read()
        # Otherwise look for a generated artifact under OUTPUT_DIR.
        for root, dirs, filenames in os.walk(opts["config"]["OUTPUT_DIR"]):
            if name in filenames:
                web.header("Content-Type", ctype)
                with open(os.path.join(root, name), "rb") as fp:
                    return fp.read()
        raise web.notfound()
|
||
|
||
if __name__ == "__main__":
    # Start the built-in web.py development server with the routes above.
    app = web.application(urls, globals())
    app.run()
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file not shown.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,8 @@ | ||
r2pipe==0.9.9 | ||
pypiwin32==219 | ||
yara_python==3.8.1 | ||
web.py==0.39 | ||
pefile==2017.11.5 | ||
networkx==2.2 | ||
web==0.6.0 | ||
yara==1.7.7 |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,151 @@ | ||
import os | ||
import yara | ||
import json | ||
import pefile | ||
import zipfile | ||
import hashlib | ||
import win32com.client | ||
from utils import db_comm | ||
from utils import peparser | ||
from utils.config import Config | ||
from utils.yarascan import YaraScan | ||
from utils.playbookSig import playbooksig | ||
from utils.digicheck import DigitalSignatureCheck | ||
from utils.graphity.graphity import get_behaviors | ||
|
||
# Regex for embedded-file listings: hex offset, decimal size, type name.
# NOTE(review): appears unused in this module — confirm before removing.
RE_EMBEDDED_FILE = r'0x([A-F0-9]+)\s+([0-9]+)\s+([^,:\(\.]+)'

# Global configuration, loaded once at import time.
opts = Config().read_config()
|
||
|
||
def zipdir(path, ziph):
    """Recursively add every file found under *path* to the open ZipFile *ziph*.

    Entry names are the paths exactly as walked (relative to *path*'s own
    prefix), matching ZipFile.write's default arcname behavior.
    """
    for dirpath, _subdirs, filenames in os.walk(path):
        for fname in filenames:
            ziph.write(os.path.join(dirpath, fname))
|
||
|
||
def process_file(yara_scan, yara_rules, yara_id_rules, yara_mitre_rules, input_file, output_file_static,
                 outputfile_mitre):
    """Run three yara rule sets against *input_file* and write two JSON reports.

    Writes the MITRE technique matches to *outputfile_mitre*, the combined
    file-type / generic yara matches to *output_file_static*, and returns the
    latter dict.

    NOTE(review): match results are collected through mutable state on
    *yara_scan* (``yara_sig_matched`` / ``yara_idsig_matched``), so the order
    of the match calls and the reset between them is significant.
    """
    with open(input_file, 'rb') as f:
        file_data = f.read()

    # MITRE rules: matches accumulate into yara_scan.yara_sig_matched via the
    # description-aware callback; only CALLBACK_MATCHES events are delivered.
    yara_mitre_rules.match(data=file_data, callback=yara_scan.yara_callback_desc,
                           which_callbacks=yara.CALLBACK_MATCHES)
    json_data = yara_scan.yara_sig_matched
    with open(outputfile_mitre, 'w') as fw:
        json_report = json.dumps(json_data, sort_keys=True, indent=4)
        # Python 2: writing utf-8 bytes to a text-mode file is accepted.
        fw.write(json_report.encode('utf-8'))

    json_data = {}
    # File-type identification rules use a separate accumulator
    # (yara_idsig_matched), so no reset is needed before this call.
    yara_id_rules.match(data=file_data, callback=yara_scan.yara_callback, which_callbacks=yara.CALLBACK_MATCHES)
    json_data['File Type Information'] = yara_scan.yara_idsig_matched

    # Reset the shared accumulator so the generic rule matches don't inherit
    # the MITRE matches collected above.
    yara_scan.yara_sig_matched = {}
    yara_rules.match(data=file_data, callback=yara_scan.yara_callback_desc, which_callbacks=yara.CALLBACK_MATCHES)
    json_data['Yara Matched'] = yara_scan.yara_sig_matched

    with open(output_file_static, 'w') as fw:
        json_report = json.dumps(json_data, sort_keys=True, indent=4)
        fw.write(json_report.encode('utf-8'))
    return json_data
|
||
|
||
def process_dir(src_dir, dst_dir):
    """Analyse every PE file under *src_dir*, writing JSON artifacts to *dst_dir*.

    Per file it produces: basic_info, static (PE parse), yara, mitre, behav,
    campaign and cert JSON files, then zips the whole *dst_dir* into
    OUTPUT_DIR. Returns True on success; returns None early if any file fails
    to load (see NOTE below).
    """

    print("Processing: " + src_dir + " ...")
    yara_scan = YaraScan()
    # Compile the three rule indexes once for the whole directory.
    yara_rules = yara.compile('./yara_sigs/index.yar')
    yara_idrules = yara.compile('./yara_sigs/index_id.yar')
    yara_mitre_rules = yara.compile('./yara_sigs/index_mitre.yar')

    for root_dir, dirs, files in os.walk(src_dir):
        for filename in files:
            print(filename)
            src_file = os.path.join(root_dir, filename)
            try:
                # Validate the file parses as PE before doing any work.
                pefile.PE(src_file)
                print "PE File loaded"
                with open(src_file, 'rb') as f:
                    contents = f.read()
                file_size = len(contents)
                sha1 = hashlib.sha1(contents).hexdigest()
                sha2 = hashlib.sha256(contents).hexdigest()
                # md5 accepts only chunks of 128*N bytes
                md5_obj = hashlib.md5()
                for i in range(0, len(contents), 8192):
                    md5_obj.update(contents[i:i + 8192])
                md5 = md5_obj.hexdigest()
            except Exception as e:
                # NOTE(review): this `return` (None, treated as failure by the
                # caller) aborts the WHOLE directory on the first bad file
                # rather than skipping it — confirm whether `continue` was
                # intended.
                print("Skipping: " + src_file)
                print("Error: " + str(e))
                return

            basic_info = {'MD5': md5, 'SHA1': sha1, 'SHA256': sha2, 'File Size': file_size}

            with open(os.path.join(dst_dir, filename) + ".basic_info.json", 'wb') as fw:
                json.dump(basic_info, fw)
            # Static PE metadata (sections, imports, etc.).
            peparsed = peparser.parse(src_file)
            with open(os.path.join(dst_dir, filename) + ".static.json", 'wb') as fp:
                json.dump(peparsed, fp)
            dst_file_static = os.path.join(dst_dir, filename) + ".yara.json"
            dst_file_mitre = os.path.join(dst_dir, filename) + ".mitre.json"
            # run yara rules on file
            process_file(yara_scan, yara_rules, yara_idrules, yara_mitre_rules, src_file, dst_file_static,
                         dst_file_mitre)

            # Behavioral analysis (graphity/radare2); writes .behav.json.
            dst_file = os.path.join(dst_dir, filename) + ".behav.json"
            get_behaviors(src_file, dst_file, dst_dir)
            if os.path.exists(os.path.join(dst_dir, filename) + ".behav.json"):
                with open(os.path.join(dst_dir, filename) + ".behav.json", 'rb') as fp:
                    file_data = fp.read()

                # Run API-sequence MITRE rules over the behavior report and
                # merge those matches into the existing .mitre.json by TID.
                json_data = {}
                yara_mitre_api = yara.compile('.\\yara_sigs\\mitre\\api_based.yar')
                yara_scan.yara_sig_matched = {}
                yara_mitre_api.match(data=file_data, callback=yara_scan.yara_callback_desc,
                                     which_callbacks=yara.CALLBACK_MATCHES)
                json_data['API_MITRE'] = yara_scan.yara_sig_matched
                with open(dst_file_mitre, 'rb') as fs:
                    mitre_matched_json = json.loads(fs.read())
                for matched_tid in mitre_matched_json.keys():
                    if matched_tid in json_data['API_MITRE']:
                        mitre_matched_json[matched_tid].update(json_data['API_MITRE'][matched_tid])
                with open(dst_file_mitre, 'wb') as fs:
                    fs.write(json.dumps(mitre_matched_json, sort_keys=True, indent=4).encode('utf-8'))
            # Map the matched techniques to known playbooks/campaigns.
            dst_campaign_file = os.path.join(dst_dir, filename) + ".campaign.json"
            playbooksig(opts["config"]["PLAYBOOK_JSON"], dst_file_mitre, dst_campaign_file)

            # Digital-signature verification of the sample.
            with open(os.path.join(dst_dir, filename) + ".cert.json", 'wb') as fp:
                DigiSig = DigitalSignatureCheck()
                DigiSig.run(src_file)
                json.dump(DigiSig._REQ_DATA_FIELD, fp)
    # Zip all artifacts for the report-download endpoint (Windows paths).
    report_folder_name = dst_dir.split("\\")[-1]
    zipf = zipfile.ZipFile(os.path.join(opts["config"]["OUTPUT_DIR"], report_folder_name+'.zip'), 'w',
                           zipfile.ZIP_DEFLATED)
    zipdir(dst_dir, zipf)
    zipf.close()
    return True
|
||
|
||
def check_queue():
    """Block on the private MSMQ queue and process each received sample UID.

    Runs forever. Each message body is a sample UID; its input/output
    directories are derived from the configured paths, and the sample's DB
    row is marked COMPLETED or FAILED after processing.
    """
    qinfo = win32com.client.Dispatch("MSMQ.MSMQQueueInfo")
    computer_name = os.getenv('COMPUTERNAME')
    qinfo.FormatName = "direct=os:" + computer_name + "\\PRIVATE$\\" + opts["config"]["QUEUE_NAME"]
    queue = qinfo.Open(1, 0)  # Open a ref to queue to read(1)
    try:
        while True:
            msg = queue.Receive()  # blocks until a message arrives
            if msg:
                print("Found new sample:")
                print("Label: %s" % msg.Label)
                print("Body : %s" % msg.Body)
                uid = msg.Body.encode('utf-8')
                bDone = process_dir(os.path.join(opts["config"]["INPUT_DIR"], uid),
                                    os.path.join(opts["config"]["OUTPUT_DIR"], uid))
                if bDone:
                    db_comm.update(uid, "COMPLETED")
                else:
                    db_comm.update(uid, "FAILED")
    finally:
        # Bug fix: queue.Close() was dead code after the infinite loop;
        # the handle is now released even when the loop exits via exception.
        queue.Close()
|
||
|
||
if __name__ == '__main__':
    # Worker entry point: consume sample UIDs from MSMQ until interrupted.
    check_queue()
Oops, something went wrong.