Merged
16 changes: 9 additions & 7 deletions src/sentry/models/file.py
@@ -58,7 +58,7 @@ def enum(**named_values):
 def _get_size_and_checksum(fileobj):
     size = 0
     checksum = sha1()
-    while 1:
+    while True:
Contributor commented:
Not that it matters much, but `while 1` is optimized in Python 2 and `while True` is not.

Contributor Author replied:
autopep8 changed those automatically, so I'm not really sure we want to fight it.
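For context on this exchange: the difference is easy to confirm by disassembling both loop forms. A minimal sketch, assuming CPython 2, where `True` is an ordinary global that must be looked up and tested on every iteration (on Python 3 both forms compile to identical bytecode):

```python
import dis


def loop_literal():
    while 1:      # peephole-optimized: no truth test is emitted
        break


def loop_true():
    while True:   # Python 2: loads the global True and tests it each pass
        break


dis.dis(loop_literal)
dis.dis(loop_true)
```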

         chunk = fileobj.read(65536)
         if not chunk:
             break
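For readers who only see the truncated hunk above, the helper after this change reads roughly as below. This is a sketch reconstructed from the visible lines; returning `(size, hexdigest)` is an assumption based on how callers store the checksum as a string.

```python
from hashlib import sha1


def _get_size_and_checksum(fileobj):
    # Stream the file in 64 KiB chunks so large uploads are never held
    # fully in memory while the SHA-1 is computed.
    size = 0
    checksum = sha1()
    while True:
        chunk = fileobj.read(65536)
        if not chunk:
            break
        size += len(chunk)
        checksum.update(chunk)
    return size, checksum.hexdigest()  # assumed return shape
```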
@@ -134,7 +134,7 @@ def from_files(cls, files, organization=None):

         def _upload_and_pend_chunk(fileobj, size, checksum, lock):
             blob = cls(size=size, checksum=checksum)
-            blob.path = cls.generate_unique_path(blob.timestamp)
+            blob.path = cls.generate_unique_path()
             storage = get_storage()
             storage.save(blob.path, fileobj)
             blobs_to_save.append((blob, lock))
@@ -156,7 +156,7 @@ def _save_blob(blob):
             _ensure_blob_owned(blob)

         def _flush_blobs():
-            while 1:
+            while True:
                 try:
                     blob, lock = blobs_to_save.pop()
                 except IndexError:
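The `_flush_blobs` loop above uses a simple drain pattern: keep popping pending work until the list raises `IndexError`, so no separate length check is needed. A standalone sketch of the pattern (the names here are illustrative, not Sentry's):

```python
def drain(pending, handle):
    # Pop until the list is empty; IndexError is the natural stop signal.
    while True:
        try:
            item = pending.pop()
        except IndexError:
            break
        handle(item)


drain([1, 2, 3], lambda item: None)  # handles 3, then 2, then 1
```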
@@ -229,7 +229,7 @@ def from_file(cls, fileobj):
             return existing

         blob = cls(size=size, checksum=checksum)
-        blob.path = cls.generate_unique_path(blob.timestamp)
+        blob.path = cls.generate_unique_path()
         storage = get_storage()
         storage.save(blob.path, fileobj)
         blob.save()
@@ -238,9 +238,11 @@ def from_file(cls, fileobj):
         return blob

     @classmethod
-    def generate_unique_path(cls, timestamp):
-        pieces = [six.text_type(x) for x in divmod(int(timestamp.strftime('%s')), ONE_DAY)]
-        pieces.append(uuid4().hex)
+    def generate_unique_path(cls):
+        # We intentionally do not use checksums as path names to avoid concurrency issues
+        # when we attempt concurrent uploads for any reason.
+        uuid_hex = uuid4().hex
+        pieces = [uuid_hex[:2], uuid_hex[2:6], uuid_hex[6:]]
         return u'/'.join(pieces)

     def delete(self, *args, **kwargs):
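The heart of the change in this file: blob paths are now derived from a fresh UUID instead of the upload timestamp, and deliberately not from the checksum, so concurrent uploads of identical content never contend for the same path. A standalone sketch of the new scheme (outside Sentry's class, for illustration only):

```python
from uuid import uuid4


def generate_unique_path():
    # 32 hex characters split into 2/4/26 path segments (a fan-out layout,
    # presumably to avoid one flat directory of blobs).
    uuid_hex = uuid4().hex
    return u'/'.join([uuid_hex[:2], uuid_hex[2:6], uuid_hex[6:]])


print(generate_unique_path())  # e.g. '3f/a91c/0d7e5b2a4c6f8e1b3d5a7c9e0f'
```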
12 changes: 12 additions & 0 deletions tests/sentry/models/test_file.py
@@ -24,6 +24,18 @@ def test_from_file(self):
         assert my_file1.checksum == my_file2.checksum
         assert my_file1.path == my_file2.path

+    def test_generate_unique_path(self):
+        path = FileBlob.generate_unique_path()
+        assert path
+
+        parts = path.split('/')
+        assert len(parts) == 3
+        assert map(len, parts) == [2, 4, 26]
+
+        # Check uniqueness
+        path2 = FileBlob.generate_unique_path()
+        assert path != path2
+

 class FileTest(TestCase):
     def test_file_handling(self):
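One portability note on the new test above: `map(len, parts) == [2, 4, 26]` only compares equal on Python 2, where `map` returns a list; under Python 3 it returns an iterator and the assertion would fail. If that ever matters, the check can be written to work on both, roughly:

```python
parts = path.split('/')
assert [len(p) for p in parts] == [2, 4, 26]
```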