Merge pull request #121 from sa7mon/relative-file-bug
Fix path traversal bug
sa7mon committed Nov 28, 2021
2 parents 6f7a679 + 37421e7 commit fafa30a
Showing 5 changed files with 39 additions and 7 deletions.
27 changes: 23 additions & 4 deletions S3Scanner/S3Service.py
@@ -10,11 +10,12 @@
 from botocore.client import Config
 import datetime
 from S3Scanner.exceptions import AccessDeniedException, InvalidEndpointException, BucketMightNotExistException
-from os.path import normpath
 import pathlib
 from concurrent.futures import ThreadPoolExecutor, as_completed
 from functools import partial
 from urllib3 import disable_warnings
+import os
+
 
 ALL_USERS_URI = 'uri=http://acs.amazonaws.com/groups/global/AllUsers'
 AUTH_USERS_URI = 'uri=http://acs.amazonaws.com/groups/global/AuthenticatedUsers'
@@ -284,21 +285,25 @@ def dump_bucket_multithread(self, bucket, dest_directory, verbose=False, threads

         for future in as_completed(futures):
             if future.exception():
-                print(f"{bucket.name} | Download failed: {futures[future]}")
+                print(f"{bucket.name} | Download failed: {futures[future]} | {future.exception()}")
 
         print(f"{bucket.name} | Dumping completed")
 
     def download_file(self, dest_directory, bucket, verbose, obj):
         """
         Download `obj` from `bucket` into `dest_directory`
-        :param str dest_directory: Directory to store the object into
+        :param str dest_directory: Directory to store the object into. _Must_ end in a slash
         :param S3Bucket bucket: Bucket to download the object from
         :param bool verbose: Output verbose messages to the user
         :param S3BucketObject obj: Object to download
         :return: None
         """
-        dest_file_path = pathlib.Path(normpath(dest_directory + obj.key))
+        dest_file_path = pathlib.Path(os.path.normpath(os.path.join(dest_directory, obj.key)))
 
+        if not self.is_safe_file_to_download(obj.key, dest_directory):
+            print(f"{bucket.name} | Skipping file {obj.key}. File references a parent directory.")
+            return
+
         if dest_file_path.exists():
             if dest_file_path.stat().st_size == obj.size:
                 if verbose:
@@ -342,6 +347,20 @@ def enumerate_bucket_objects(self, bucket):
             raise AccessDeniedException("AccessDenied while enumerating bucket objects")
         bucket.objects_enumerated = True
 
+    def is_safe_file_to_download(self, file_to_check, dest_directory):
+        """
+        Check if bucket object would be saved outside of `dest_directory` if downloaded.
+        AWS allows object keys to include relative path characters like '../' which can lead to a
+        path traversal-like issue where objects get saved outside of the intended directory.
+        :param string file_to_check: Bucket object key
+        :param string dest_directory: Path to directory to save file in
+        :return: bool
+        """
+        file_to_check = os.path.abspath(os.path.join(dest_directory, file_to_check))
+        safe_dir = os.path.abspath(dest_directory)
+        return os.path.commonpath([safe_dir]) == os.path.commonpath([safe_dir, file_to_check])
+
     def parse_found_acl(self, bucket):
         """
         Translate ACL grants into permission properties. If we were able to read the ACLs, we should be able to skip
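The root cause is visible in the old line of download_file: the destination directory was concatenated with the attacker-controlled object key and passed through normpath, which resolves '..' segments, letting a hostile key climb out of the download directory. Below is a minimal standalone sketch contrasting the old path construction with the commonpath check this commit adds; the directory and key names are hypothetical, not taken from the diff.

import os
from os.path import normpath

dest_directory = "./buckets/"          # hypothetical download directory
hostile_key = "../../etc/cron.d/evil"  # hypothetical attacker-controlled object key

# Old behavior: concatenation + normpath resolves the '..' segments,
# so the object would be written outside of dest_directory.
print(normpath(dest_directory + hostile_key))   # ../etc/cron.d/evil

# New behavior: resolve the candidate path, then require that the destination
# directory is a prefix of it. commonpath([safe_dir]) is just safe_dir, so the
# comparison asks: is safe_dir the common prefix of itself and the candidate?
candidate = os.path.abspath(os.path.join(dest_directory, hostile_key))
safe_dir = os.path.abspath(dest_directory)
print(os.path.commonpath([safe_dir]) == os.path.commonpath([safe_dir, candidate]))  # False -> object is skipped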
2 changes: 1 addition & 1 deletion S3Scanner/__main__.py
@@ -16,7 +16,7 @@
 from concurrent.futures import ThreadPoolExecutor, as_completed
 from .exceptions import InvalidEndpointException
 
-CURRENT_VERSION = '2.0.1'
+CURRENT_VERSION = '2.0.2'
 AWS_ENDPOINT = 'https://s3.amazonaws.com'


2 changes: 1 addition & 1 deletion setup.cfg
@@ -1,6 +1,6 @@
 [metadata]
 name = S3Scanner
-version = 2.0.1
+version = 2.0.2
 author = Dan Salmon
 author_email = dan@salmon.cat
 description = Scan for open S3 buckets and dump the contents
2 changes: 1 addition & 1 deletion tests/test_scanner.py
@@ -11,7 +11,7 @@ def test_arguments():
     s = S3Service()
 
     a = subprocess.run([sys.executable, '-m', 'S3Scanner', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    assert a.stdout.decode('utf-8').strip() == '2.0.1'
+    assert a.stdout.decode('utf-8').strip() == '2.0.2'
 
     b = subprocess.run([sys.executable, '-m', 'S3Scanner', 'scan', '--bucket', 'flaws.cloud'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     assert_scanner_output(s, 'flaws.cloud | bucket_exists | AuthUsers: [], AllUsers: [Read]', b.stdout.decode('utf-8').strip())
13 changes: 13 additions & 0 deletions tests/test_service.py
@@ -545,6 +545,19 @@ def test_download_file():
     b = S3Bucket("bucket-no-existo")
     s.download_file(os.path.join(dest_folder, ''), b, True, o)
 
+def test_is_safe_file_to_download():
+    test_setup_new()
+    s = S3Service()
+
+    # Check a good file name
+    assert s.is_safe_file_to_download("file.txt", "./bucket_dir/") == True
+    assert s.is_safe_file_to_download("file.txt", "./bucket_dir") == True
+
+    # Check file with relative name
+    assert s.is_safe_file_to_download("../file.txt", "./buckets/") == False
+    assert s.is_safe_file_to_download("../", "./buckets/") == False
+    assert s.is_safe_file_to_download("/file.txt", "./buckets/") == False
+
 
 def test_validate_endpoint_url_nonaws():
     disable_warnings()
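Since is_safe_file_to_download is pure path arithmetic, the new test runs without any AWS access, and the same assertions can be reproduced from a REPL. A quick sketch, assuming the package is importable (e.g. after `pip install -e .`) and that S3Service() can be constructed without credentials, as the test suite does:

from S3Scanner.S3Service import S3Service

s = S3Service()  # assumes anonymous construction works, as in tests/test_service.py
assert s.is_safe_file_to_download("file.txt", "./bucket_dir/")      # stays inside the directory
assert not s.is_safe_file_to_download("../file.txt", "./buckets/")  # parent-directory escape
assert not s.is_safe_file_to_download("/file.txt", "./buckets/")    # absolute keys escape too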
