Skip to content

Commit

Permalink
Update Artifacts
Browse files Browse the repository at this point in the history
Add sourcefile to tsv files
add usernames to tsv files that have users
This is for Autopsy integration
  • Loading branch information
markmckinnon committed Jul 1, 2023
1 parent d5bfd74 commit 68b688b
Show file tree
Hide file tree
Showing 39 changed files with 5,182 additions and 149 deletions.
4,961 changes: 4,961 additions & 0 deletions .gitignore

Large diffs are not rendered by default.

20 changes: 10 additions & 10 deletions scripts/artifacts/apacheLogs.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ def get_apache_logs(files_found, report_folder, seeker, wrap_text):
user_agent = entry.headers_in['User-Agent']
status = entry.final_status
bytes_out = entry.bytes_out
temp_data_list = (timestamp, remote_ip, request, status, bytes_out, referer, user_agent)
temp_data_list = (timestamp, remote_ip, request, status, bytes_out, referer, user_agent, file_found)
data_list.append(temp_data_list)
if remote_ip in ip_connection_bytes_dict:
ip_connection_bytes_dict[remote_ip] = ip_connection_bytes_dict[remote_ip] + bytes_out
Expand Down Expand Up @@ -66,7 +66,7 @@ def get_apache_logs(files_found, report_folder, seeker, wrap_text):
report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
report.start_artifact_report(report_folder, os.path.basename(report_path))
report.add_script()
data_headers = ('timestamp', 'remote_ip', 'request', 'status', 'bytes_out', 'referer', 'user_agent')
data_headers = ('timestamp', 'remote_ip', 'request', 'status', 'bytes_out', 'referer', 'user_agent','sourcefile')

report.write_artifact_data_table(data_headers, data_list, file_found)
report.end_artifact_report()
Expand All @@ -87,11 +87,11 @@ def get_apache_logs(files_found, report_folder, seeker, wrap_text):
report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
report.start_artifact_report(report_folder, os.path.basename(report_path))
report.add_script()
data_headers = ('ip_address', 'num_of_bytes')
data_headers = ('ip_address', 'num_of_bytes', 'sourcefile')

ip_bytes = []
for ip_key in ip_connection_bytes_dict.keys():
ip_bytes.append((ip_key, ip_connection_bytes_dict[ip_key]))
ip_bytes.append((ip_key, ip_connection_bytes_dict[ip_key], file_found))

report.write_artifact_data_table(data_headers, ip_bytes, file_found)
report.end_artifact_report()
Expand All @@ -110,11 +110,11 @@ def get_apache_logs(files_found, report_folder, seeker, wrap_text):
report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
report.start_artifact_report(report_folder, os.path.basename(report_path))
report.add_script()
data_headers = ('ip_address', 'num_of_resources_accessed')
data_headers = ('ip_address', 'num_of_resources_accessed', 'sourcefile')

ip_page_accesses = []
for ip_key in ip_connection_pages_dict.keys():
ip_page_accesses.append((ip_key, ip_connection_pages_dict[ip_key]))
ip_page_accesses.append((ip_key, ip_connection_pages_dict[ip_key], file_found))

report.write_artifact_data_table(data_headers, ip_page_accesses, file_found)
report.end_artifact_report()
Expand All @@ -133,11 +133,11 @@ def get_apache_logs(files_found, report_folder, seeker, wrap_text):
report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
report.start_artifact_report(report_folder, os.path.basename(report_path))
report.add_script()
data_headers = ('uri', 'num_of_bytes')
data_headers = ('uri', 'num_of_bytes', 'sourcefile')

uri_bytes = []
for uri_key in uri_bytes_dict.keys():
uri_bytes.append((uri_key, uri_bytes_dict[uri_key]))
uri_bytes.append((uri_key, uri_bytes_dict[uri_key], file_found))

report.write_artifact_data_table(data_headers, uri_bytes, file_found)
report.end_artifact_report()
Expand Down Expand Up @@ -179,11 +179,11 @@ def get_apache_logs(files_found, report_folder, seeker, wrap_text):
report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
report.start_artifact_report(report_folder, os.path.basename(report_path))
report.add_script()
data_headers = ('request_method', 'num_of_bytes')
data_headers = ('request_method', 'num_of_bytes', 'sourcefile')

method_bytes = []
for method_key in request_method_bytes.keys():
method_bytes.append((method_key, request_method_bytes[method_key]))
method_bytes.append((method_key, request_method_bytes[method_key], file_found))

report.write_artifact_data_table(data_headers, method_bytes, file_found)
report.end_artifact_report()
Expand Down
12 changes: 6 additions & 6 deletions scripts/artifacts/aptHistory.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,14 +37,14 @@ def get_apt_history_log(files_found, report_folder, seeker, wrap_text):
removed_program = line_list[1]
removed_programs = removed_program.split("), ")
for program in removed_programs:
data_removed_programs_list.append((program.split(' (')[0], start_date, start_epoch_time))
data_removed_programs_list.append((program.split(' (')[0], start_date, start_epoch_time, file_found))
if "End-Date" in line:
line_list = line.split(": ")
end_date = line_list[1]
if "Install" in history_type:
data_list.append((start_date, command_line, history_type, installed_program, end_date, start_epoch_time))
data_list.append((start_date, command_line, history_type, installed_program, end_date, start_epoch_time, file_found))
else:
data_list.append((start_date, command_line, history_type, removed_program, end_date, start_epoch_time))
data_list.append((start_date, command_line, history_type, removed_program, end_date, start_epoch_time, file_found))

usageentries = len(data_list)
if usageentries > 0:
Expand All @@ -54,7 +54,7 @@ def get_apt_history_log(files_found, report_folder, seeker, wrap_text):
report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
report.start_artifact_report(report_folder, os.path.basename(report_path))
report.add_script()
data_headers = ('start_date', 'commandline', 'type', 'programs', 'end_date', 'start_date_epoch')
data_headers = ('start_date', 'commandline', 'type', 'programs', 'end_date', 'start_date_epoch', 'sourcefile')

report.write_artifact_data_table(data_headers, data_list, file_found)
report.end_artifact_report()
Expand All @@ -75,7 +75,7 @@ def get_apt_history_log(files_found, report_folder, seeker, wrap_text):
report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
report.start_artifact_report(report_folder, os.path.basename(report_path))
report.add_script()
data_headers = ('installed_program', 'date_installed', 'date_installed_epoch')
data_headers = ('installed_program', 'date_installed', 'date_installed_epoch', 'sourcefile')
report.write_artifact_data_table(data_headers, data_installed_programs_list, file_found)
report.end_artifact_report()

Expand All @@ -95,7 +95,7 @@ def get_apt_history_log(files_found, report_folder, seeker, wrap_text):
report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
report.start_artifact_report(report_folder, os.path.basename(report_path))
report.add_script()
data_headers = ('removed_program', 'date_removed', 'date_removed_epoch')
data_headers = ('removed_program', 'date_removed', 'date_removed_epoch', 'sourcefile')

report.write_artifact_data_table(data_headers, data_removed_programs_list, file_found)
report.end_artifact_report()
Expand Down
9 changes: 6 additions & 3 deletions scripts/artifacts/authLog.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,13 +49,15 @@ def get_auth_log(files_found, report_folder, seeker, wrap_text):
temp_data_list.append(process)
temp_data_list.append(xtype)
temp_data_list.append(message)
temp_data_list.append(file_found)
data_list.append(temp_data_list)

if 'sudo' in process:
if 'pam' not in xtype:
sudo_temp_data_list.append(timestamp)
sudo_temp_data_list.append(host)
sudo_temp_data_list.append(xtype)
sudo_temp_data_list.append(file_found)
for sudo_data in message.split(' ; '):
sudo_temp_data_list.append(sudo_data)
sudo_data_list.append(sudo_temp_data_list)
Expand All @@ -65,6 +67,7 @@ def get_auth_log(files_found, report_folder, seeker, wrap_text):
failed_temp_data_list.append(host)
failed_temp_data_list.append(process)
failed_temp_data_list.append(message)
failed_temp_data_list.append(file_found)
failed_data_list.append(failed_temp_data_list)

usageentries = len(data_list)
Expand All @@ -75,7 +78,7 @@ def get_auth_log(files_found, report_folder, seeker, wrap_text):
report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
report.start_artifact_report(report_folder, os.path.basename(report_path))
report.add_script()
data_headers = ('timestamp', 'host', 'process', 'type', 'message')
data_headers = ('timestamp', 'host', 'process', 'type', 'message', 'sourcefile')

report.write_artifact_data_table(data_headers, data_list, file_found)
report.end_artifact_report()
Expand All @@ -96,7 +99,7 @@ def get_auth_log(files_found, report_folder, seeker, wrap_text):
report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
report.start_artifact_report(report_folder, os.path.basename(report_path))
report.add_script()
data_headers = ('timestamp', 'host', 'user', 'terminal', 'print_working_directory', 'run_as', 'command')
data_headers = ('timestamp', 'host', 'user', 'terminal', 'print_working_directory', 'run_as', 'command', 'sourcefile')

report.write_artifact_data_table(data_headers, sudo_data_list, file_found)
report.end_artifact_report()
Expand All @@ -117,7 +120,7 @@ def get_auth_log(files_found, report_folder, seeker, wrap_text):
report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
report.start_artifact_report(report_folder, os.path.basename(report_path))
report.add_script()
data_headers = ('timestamp', 'host', 'process', 'message')
data_headers = ('timestamp', 'host', 'process', 'message', 'sourcefile')

report.write_artifact_data_table(data_headers, failed_data_list, file_found)
report.end_artifact_report()
Expand Down
6 changes: 3 additions & 3 deletions scripts/artifacts/bashHistory.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,18 +15,18 @@ def get_bash_history(files_found, report_folder, seeker, wrap_text):
lines = f.readlines()
for line in lines:
temp_data_list = []
temp_data_list = ((user_name, line))
temp_data_list = ((user_name, line, file_found))
data_list.append(temp_data_list)

usageentries = len(data_list)
if usageentries > 0:
report = ArtifactHtmlReport(f'Bash History {user_name}')
#check for existing and get next name for report file, so report from another file does not get overwritten
report_path = os.path.join(report_folder, f'bash_hsitory_{user_name}.temphtml')
report_path = os.path.join(report_folder, f'bash_history_{user_name}.temphtml')
report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
report.start_artifact_report(report_folder, os.path.basename(report_path))
report.add_script()
data_headers = ['user_name', 'command']
data_headers = ['user_name', 'command', 'sourcefile']

report.write_artifact_data_table(data_headers, data_list, file_found)
report.end_artifact_report()
Expand Down
3 changes: 2 additions & 1 deletion scripts/artifacts/btmp.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ def get_btmp(files_found, report_folder, seeker, wrap_text):
temp_data_list.append(entry.host)
temp_data_list.append(entry.time)
temp_data_list.append(entry.sec)
temp_data_list.append(file_found)
data_list.append(temp_data_list)

usageentries = len(data_list)
Expand All @@ -33,7 +34,7 @@ def get_btmp(files_found, report_folder, seeker, wrap_text):
report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
report.start_artifact_report(report_folder, os.path.basename(report_path))
report.add_script()
data_headers = ('user', 'terminal', 'host', 'timestamp', 'epoch_timestamp')
data_headers = ('user', 'terminal', 'host', 'timestamp', 'epoch_timestamp', 'sourcefile')

report.write_artifact_data_table(data_headers, data_list, file_found)
report.end_artifact_report()
Expand Down
15 changes: 9 additions & 6 deletions scripts/artifacts/chromium.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@
import textwrap

from scripts.artifact_report import ArtifactHtmlReport
from scripts.lleapfuncs import logfunc, tsv, timeline, is_platform_windows, get_next_unused_name, open_sqlite_db_readonly, get_browser_name
from scripts.lleapfuncs import logfunc, tsv, timeline, is_platform_windows, get_next_unused_name,\
open_sqlite_db_readonly, get_browser_name, get_user_name_from_home


def get_chrome(files_found, report_folder, seeker, wrap_text):
Expand All @@ -17,6 +18,8 @@ def get_chrome(files_found, report_folder, seeker, wrap_text):
if file_found.find('app_sbrowser') >= 0:
browser_name = 'Browser'

user_name = get_user_name_from_home(file_found)

db = open_sqlite_db_readonly(file_found)
cursor = db.cursor()
cursor.execute('''
Expand All @@ -32,19 +35,19 @@ def get_chrome(files_found, report_folder, seeker, wrap_text):
all_rows = cursor.fetchall()
usageentries = len(all_rows)
if usageentries > 0:
report = ArtifactHtmlReport(f'{browser_name} History')
report = ArtifactHtmlReport(f'{browser_name} History - {user_name}')
#check for existing and get next name for report file, so report from another file does not get overwritten
report_path = os.path.join(report_folder, f'{browser_name} History.temphtml')
report_path = os.path.join(report_folder, f'{browser_name} History - {user_name}.temphtml')
report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
report.start_artifact_report(report_folder, os.path.basename(report_path))
report.add_script()
data_headers = ('Last Visit Time','URL','Title','Visit Count','Hidden')
data_headers = ('Last Visit Time','URL','Title','Visit Count','Hidden', 'username', 'sourcefile')
data_list = []
for row in all_rows:
if wrap_text:
data_list.append((row[0],textwrap.fill(row[1], width=100),row[2],row[3],row[4]))
data_list.append((row[0],textwrap.fill(row[1], width=100),row[2],row[3],row[4], user_name, file_found))
else:
data_list.append((row[0],row[1],row[2],row[3],row[4]))
data_list.append((row[0],row[1],row[2],row[3],row[4], user_name, file_found))
report.write_artifact_data_table(data_headers, data_list, file_found)
report.end_artifact_report()

Expand Down
11 changes: 7 additions & 4 deletions scripts/artifacts/chromiumAutofill.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,8 @@
import textwrap

from scripts.artifact_report import ArtifactHtmlReport
from scripts.lleapfuncs import logfunc, tsv, timeline, is_platform_windows, get_next_unused_name, does_column_exist_in_db, open_sqlite_db_readonly, get_browser_name
from scripts.lleapfuncs import logfunc, tsv, timeline, is_platform_windows, get_next_unused_name,\
does_column_exist_in_db, open_sqlite_db_readonly, get_browser_name, get_user_name_from_home

def get_chromeAutofill(files_found, report_folder, seeker, wrap_text):

Expand All @@ -17,6 +18,8 @@ def get_chromeAutofill(files_found, report_folder, seeker, wrap_text):
elif file_found.find('.magisk') >= 0 and file_found.find('mirror') >= 0:
continue # Skip sbin/.magisk/mirror/data/.. , it should be duplicate data??

user_name = get_user_name_from_home(file_found)

db = open_sqlite_db_readonly(file_found)
cursor = db.cursor()

Expand All @@ -35,14 +38,14 @@ def get_chromeAutofill(files_found, report_folder, seeker, wrap_text):
if usageentries > 0:
report = ArtifactHtmlReport(f'{browser_name} Autofill')
#check for existing and get next name for report file, so report from another file does not get overwritten
report_path = os.path.join(report_folder, f'{browser_name} Autofill.temphtml')
report_path = os.path.join(report_folder, f'{browser_name} Autofill - {user_name}.temphtml')
report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
report.start_artifact_report(report_folder, os.path.basename(report_path))
report.add_script()
data_headers = ('Date Created','Field','Value','Date Last Used','Count')
data_headers = ('Date Created','Field','Value','Date Last Used','Count','username', 'sourcefile')
data_list = []
for row in all_rows:
data_list.append((row[0],row[1],row[2],row[3],row[4]))
data_list.append((row[0],row[1],row[2],row[3],row[4], user_name, file_found))

report.write_artifact_data_table(data_headers, data_list, file_found)
report.end_artifact_report()
Expand Down
13 changes: 8 additions & 5 deletions scripts/artifacts/chromiumBookmarks.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,8 @@
import os

from scripts.artifact_report import ArtifactHtmlReport
from scripts.lleapfuncs import logfunc, tsv, timeline, is_platform_windows, get_next_unused_name, get_browser_name
from scripts.lleapfuncs import logfunc, tsv, timeline, is_platform_windows, get_next_unused_name, \
get_browser_name, get_user_name_from_home

def get_chromeBookmarks(files_found, report_folder, seeker, wrap_text):

Expand All @@ -17,6 +18,8 @@ def get_chromeBookmarks(files_found, report_folder, seeker, wrap_text):
if file_found.find('app_sbrowser') >= 0:
browser_name = 'Browser'

user_name = get_user_name_from_home(file_found)

with open(file_found, "r") as f:
dataa = json.load(f)
data_list = []
Expand All @@ -37,16 +40,16 @@ def get_chromeBookmarks(files_found, report_folder, seeker, wrap_text):
if keyb == 'name' and flag == 1:
flag = 0
parent = valueb
data_list.append((dateaddconv, url, name, parent, typed))
data_list.append((dateaddconv, url, name, parent, typed, user_name, file_found))
num_entries = len(data_list)
if num_entries > 0:
report = ArtifactHtmlReport(f'{browser_name} Bookmarks')
report = ArtifactHtmlReport(f'{browser_name} Bookmarks - {user_name}')
#check for existing and get next name for report file, so report from another file does not get overwritten
report_path = os.path.join(report_folder, f'{browser_name} Bookmarks.temphtml')
report_path = os.path.join(report_folder, f'{browser_name} Bookmarks - {user_name}.temphtml')
report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
report.start_artifact_report(report_folder, os.path.basename(report_path))
report.add_script()
data_headers = ('Added Date', 'URL', 'Name', 'Parent', 'Type')
data_headers = ('Added Date', 'URL', 'Name', 'Parent', 'Type', 'username', 'sourcefile')
report.write_artifact_data_table(data_headers, data_list, file_found)
report.end_artifact_report()

Expand Down
Loading

0 comments on commit 68b688b

Please sign in to comment.