Skip to content

Commit

Permalink
Update.
Browse files Browse the repository at this point in the history
  • Loading branch information
msuhanov committed Dec 18, 2017
1 parent 798ba91 commit 00a96a1
Show file tree
Hide file tree
Showing 11 changed files with 566 additions and 4 deletions.
7 changes: 7 additions & 0 deletions ChangeLog
@@ -1,3 +1,10 @@
Version: 1.0.7

RegistryCarve: add the HiveReconstructor class to carve and reconstruct fragmented hives, add the progress callbacks.
yarp-carver: reconstruct fragmented hives, print the current progress.

---

Version: 1.0.6

Registry, RegistrySqlite: handle an invalid reference to a parent key when parsing a truncated hive.
Expand Down
2 changes: 1 addition & 1 deletion ReadMe
Expand Up @@ -31,7 +31,7 @@ See the 'License' file.

5. Installation

# pip3 install https://github.com/msuhanov/yarp/archive/1.0.6.tar.gz
# pip3 install https://github.com/msuhanov/yarp/archive/1.0.7.tar.gz

---
(c) Maxim Suhanov
1 change: 1 addition & 0 deletions hives_for_tests/Carving/FragRecon/FragmentReconstruction2

Large diffs are not rendered by default.

Binary file not shown.

Large diffs are not rendered by default.

Binary file not shown.
Binary file not shown.
100 changes: 100 additions & 0 deletions test_cases.py
Expand Up @@ -80,6 +80,12 @@
hive_carving_compressed_noslack = path.join(HIVES_DIR, 'Carving', 'NTFSCompressedNoSlack')
hive_carving_compressed_noslack_1024 = path.join(HIVES_DIR, 'Carving', 'NTFSCompressedNoSlackCluster1024')

# Test images for RegistryCarve.HiveReconstructor: hives fragmented into
# 2, 3 and 4 pieces, a bifragmented hive stored with one extra fragment,
# and an image holding both a bi- and a quad-fragmented hive.
hive_recon_2 = path.join(HIVES_DIR, 'Carving', 'FragRecon', 'FragmentReconstruction2')
hive_recon_3 = path.join(HIVES_DIR, 'Carving', 'FragRecon', 'FragmentReconstruction3')
hive_recon_4 = path.join(HIVES_DIR, 'Carving', 'FragRecon', 'FragmentReconstruction4')
hive_recon_2plus1 = path.join(HIVES_DIR, 'Carving', 'FragRecon', 'FragmentReconstruction2plus1')
hive_recon_2and4 = path.join(HIVES_DIR, 'Carving', 'FragRecon', 'FragmentReconstruction2and4')

hive_sqlite = path.join(HIVES_DIR, 'SqliteHive')
hive_reallocvalue_sqlite = path.join(HIVES_DIR, 'ReallocValueHive')
hive_reallocvaluedata_sqlite = path.join(HIVES_DIR, 'ReallocValueDataHive')
Expand Down Expand Up @@ -1914,3 +1920,97 @@ def test_invalid_parent_fragment():
c += 1

assert c == 1

def test_bifragmented():
	"""Reconstruct a hive split into two fragments, expect exactly one
	reconstructed hive whose MD5 matches the known-good value."""

	with open(hive_recon_2, 'rb') as f:
		reconstructor = RegistryCarve.HiveReconstructor(f)
		reconstructor.find_fragments()

		digest = md5()
		count = 0
		for item in reconstructor.reconstruct_bifragmented():
			count += 1
			digest.update(item[1])

		assert count == 1
		assert digest.hexdigest() == 'edaf7986726c1343752763bd1b31ddf2'

def test_trifragmented():
	"""Reconstruct a hive split into three fragments, expect exactly one
	reconstructed hive whose MD5 matches the known-good value."""

	with open(hive_recon_3, 'rb') as f:
		reconstructor = RegistryCarve.HiveReconstructor(f)
		reconstructor.find_fragments()

		digest = md5()
		count = 0
		for item in reconstructor.reconstruct_trifragmented():
			count += 1
			digest.update(item[1])

		assert count == 1
		assert digest.hexdigest() == '2b9c80fed56a3f25ef7fd03d9462387f'

def test_quadfragmented():
	"""Reconstruct a hive split into four fragments, expect exactly one
	reconstructed hive whose MD5 matches the known-good value."""

	with open(hive_recon_4, 'rb') as f:
		reconstructor = RegistryCarve.HiveReconstructor(f)
		reconstructor.find_fragments()

		digest = md5()
		count = 0
		for item in reconstructor.reconstruct_quadfragmented():
			count += 1
			digest.update(item[1])

		assert count == 1
		assert digest.hexdigest() == '2b9c80fed56a3f25ef7fd03d9462387f'

def test_biplusfragmented():
	"""A bifragmented hive plus one extra fragment: the trifragmented pass
	should still yield exactly one hive with the bifragmented hive's MD5."""

	with open(hive_recon_2plus1, 'rb') as f:
		reconstructor = RegistryCarve.HiveReconstructor(f)
		reconstructor.find_fragments()

		digest = md5()
		count = 0
		for item in reconstructor.reconstruct_trifragmented():
			count += 1
			digest.update(item[1])

		assert count == 1
		assert digest.hexdigest() == 'edaf7986726c1343752763bd1b31ddf2'

def test_biandquadfragmented():
	"""One image holding a bi- and a quad-fragmented hive: both passes
	(find_fragments and carve + set_fragments) must each reconstruct the
	two hives, every result matching one of the two known MD5 values."""

	known_digests = [ '2b9c80fed56a3f25ef7fd03d9462387f', 'edaf7986726c1343752763bd1b31ddf2' ]

	with open(hive_recon_2and4, 'rb') as f:
		# First pass: let the reconstructor locate the fragments itself.
		reconstructor = RegistryCarve.HiveReconstructor(f)
		reconstructor.find_fragments()

		count = 0
		for carve_result, hive_buf in reconstructor.reconstruct_fragmented():
			count += 1

			digest = md5()
			digest.update(hive_buf)
			assert digest.hexdigest() in known_digests

		assert count == 2

		# Second pass: feed carver output through set_fragments() instead.
		carver = RegistryCarve.Carver(f)
		fragments = [ carve_result for carve_result in carver.carve(True, True) ]

		reconstructor = RegistryCarve.HiveReconstructor(f)
		reconstructor.set_fragments(fragments)

		count = 0
		for carve_result, hive_buf in reconstructor.reconstruct_fragmented():
			count += 1

			digest = md5()
			digest.update(hive_buf)
			assert digest.hexdigest() in known_digests

		assert count == 2

40 changes: 39 additions & 1 deletion yarp-carver
Expand Up @@ -44,6 +44,13 @@ def make_sane_filename(filename):

return filename

def print_progress_carving(bytes_read, bytes_total):
	"""Report carving progress to stderr as 'Bytes read / Bytes total: X / Y'."""

	message = 'Bytes read / Bytes total: {} / {}'.format(bytes_read, bytes_total)
	print(message, file = sys.stderr)

def print_progress_reconstruction():
	"""Emit a single '.' progress tick to stderr, flushed immediately
	(no newline, so ticks accumulate on one line)."""

	sys.stderr.write('.')
	sys.stderr.flush()

args = parse_args()

if not os.path.isdir(args.output_dir):
Expand All @@ -57,6 +64,10 @@ except (OSError, IOError):
sys.exit(255)

carver = RegistryCarve.Carver(f)
carver.progress_callback = print_progress_carving

results = []

print('Offset\tSize\tTruncated\tFile name or comment')
for carve_result in carver.carve(True, True):
if type(carve_result) is RegistryCarve.CarveResult:
Expand All @@ -79,6 +90,8 @@ for carve_result in carver.carve(True, True):
buf = f.read(carve_result.size)
out_f.write(buf)

results.append(carve_result)

elif type(carve_result) is RegistryCarve.CarveResultFragment:
print('{}\t{}\t{}\t{}'.format(carve_result.offset, carve_result.size, True, '<hive fragment>'))

Expand All @@ -90,6 +103,8 @@ for carve_result in carver.carve(True, True):
buf = f.read(carve_result.size)
out_f.write(buf)

results.append(carve_result)

elif type(carve_result) is RegistryCarve.CarveResultCompressed:
print('{}\t{}\t{}\t{}'.format(carve_result.offset, len(carve_result.buffer_decompressed), 'Unknown', carve_result.filename))

Expand All @@ -99,12 +114,13 @@ for carve_result in carver.carve(True, True):
regf_filename = make_sane_filename(regf_filename)

output_filename = '{}_{}-compressed'.format(carve_result.offset, regf_filename)

output_file = os.path.join(args.output_dir, output_filename)

with open(output_file, 'wb') as out_f:
out_f.write(carve_result.buffer_decompressed)

results.append(carve_result)

elif type(carve_result) is RegistryCarve.CarveResultFragmentCompressed:
print('{}\t{}\t{}\t{}'.format(carve_result.offset, len(carve_result.buffer_decompressed), True, '<hive fragment>'))

Expand All @@ -114,4 +130,26 @@ for carve_result in carver.carve(True, True):
with open(output_file, 'wb') as out_f:
out_f.write(carve_result.buffer_decompressed)

results.append(carve_result)

print('', file = sys.stderr)
print('Reconstructing fragmented hives: ', end = '', file = sys.stderr)
sys.stderr.flush()

reconstructor = RegistryCarve.HiveReconstructor(f)
reconstructor.set_fragments(results)
reconstructor.progress_callback = print_progress_reconstruction
for carve_result, hive_buf in reconstructor.reconstruct_fragmented():
regf_filename = carve_result.filename
if regf_filename.rfind('\\') != -1:
regf_filename = regf_filename.split('\\')[-1]
regf_filename = make_sane_filename(regf_filename)

output_filename = '{}_{}-reconstructed'.format(carve_result.offset, regf_filename)
output_file = os.path.join(args.output_dir, output_filename)

with open(output_file, 'wb') as out_f:
out_f.write(hive_buf)

print('', file = sys.stderr)
f.close()

0 comments on commit 00a96a1

Please sign in to comment.