A ton more logging during the sync
palewire committed Apr 21, 2017
1 parent af6ec0a commit 064d23a
Showing 1 changed file with 10 additions and 1 deletion.

bakery/management/commands/publish.py
@@ -258,12 +258,18 @@ def sync_with_s3(self):
         self.update_list = []
 
         # Figure out which files need to be updated and upload all these files
+        logger.debug("Comparing {} local files with bucket".format(len(self.local_file_list)))
         if self.no_pooling:
             [self.compare_local_file(f) for f in self.local_file_list]
-            [self.upload_to_s3(*u) for u in self.update_list]
         else:
             pool = ThreadPool(processes=10)
             pool.map(self.compare_local_file, self.local_file_list)
+
+        logger.debug("Uploading {} new or updated files to bucket".format(len(self.update_list)))
+        if self.no_pooling:
+            [self.upload_to_s3(*u) for u in self.update_list]
+        else:
+            pool = ThreadPool(processes=10)
             pool.map(self.pooled_upload_to_s3, self.update_list)
 
     def compare_local_file(self, file_key):
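
The hunk above splits what was one combined pass into two logged phases: compare every local file against the bucket, then upload whatever the comparison queued, fanning each phase out over a ThreadPool unless pooling is disabled. Below is a minimal standalone sketch of that pattern, not django-bakery's actual command; the Syncer class, its stand-in comparison, and the sample file list are hypothetical.

import logging
from multiprocessing.pool import ThreadPool

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)


class Syncer(object):
    """Hypothetical stand-in for the publish command's sync logic."""

    def __init__(self, local_file_list, no_pooling=False):
        self.local_file_list = local_file_list
        self.no_pooling = no_pooling
        self.update_list = []

    def compare_local_file(self, file_key):
        # Stand-in comparison: queue every file for upload.
        self.update_list.append((file_key, "/tmp/" + file_key))

    def upload_to_s3(self, key, path):
        logger.debug("Uploading {} from {}".format(key, path))

    def pooled_upload_to_s3(self, payload):
        # ThreadPool.map passes a single argument, so unpack the tuple here.
        self.upload_to_s3(*payload)

    def sync(self):
        # Phase one: compare, logging the count before the fan-out.
        logger.debug("Comparing {} local files with bucket".format(
            len(self.local_file_list)))
        if self.no_pooling:
            [self.compare_local_file(f) for f in self.local_file_list]
        else:
            pool = ThreadPool(processes=10)
            pool.map(self.compare_local_file, self.local_file_list)

        # Phase two: upload whatever the compare phase queued.
        logger.debug("Uploading {} new or updated files to bucket".format(
            len(self.update_list)))
        if self.no_pooling:
            [self.upload_to_s3(*u) for u in self.update_list]
        else:
            pool = ThreadPool(processes=10)
            pool.map(self.pooled_upload_to_s3, self.update_list)


Syncer(["index.html", "feed.xml"]).sync()
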
@@ -288,19 +294,22 @@ def compare_local_file(self, file_key):
 
             # If their md5 hexdigests match, do nothing
             if s3_md5 == local_md5 and not self.force_publish:
+                logger.debug("{} has not changed".format(file_key))
                 pass
             # Unless we want to publish everything no matter what, then add it to the update list
             elif self.force_publish:
                 self.update_list.append((file_key, file_path))
             # And if they don't match, we want to add it as well
             else:
+                logger.debug("{} has changed".format(file_key))
                 self.update_list.append((file_key, file_path))
 
             # Remove the file from the s3 dict, we don't need it anymore
             del self.s3_obj_dict[file_key]
 
         # if the file doesn't exist, queue it for creation
         else:
+            logger.debug("{} has been added".format(file_key))
             self.update_list.append((file_key, file_path))
 
     def pooled_upload_to_s3(self, payload):
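
This second hunk adds a debug line to each branch of the md5 comparison, so the log records whether a file was unchanged, changed, or newly added. A self-contained sketch of that three-way branch follows; the module-level s3_obj_dict keyed on path is a hypothetical stand-in for the command's bucket listing (on S3, a non-multipart object's ETag is its md5 hexdigest).

import hashlib
import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

# Hypothetical bucket listing and state, standing in for the command's attributes.
s3_obj_dict = {"index.html": {"ETag": hashlib.md5(b"old").hexdigest()}}
update_list = []
force_publish = False


def compare(file_key, local_bytes):
    local_md5 = hashlib.md5(local_bytes).hexdigest()
    if file_key in s3_obj_dict:
        s3_md5 = s3_obj_dict[file_key]["ETag"]
        # If their md5 hexdigests match, do nothing
        if s3_md5 == local_md5 and not force_publish:
            logger.debug("{} has not changed".format(file_key))
        # Unless we want to publish everything no matter what
        elif force_publish:
            update_list.append(file_key)
        # And if they don't match, queue the upload
        else:
            logger.debug("{} has changed".format(file_key))
            update_list.append(file_key)
        # Anything left in s3_obj_dict afterward exists only on S3.
        del s3_obj_dict[file_key]
    # If the file doesn't exist in the bucket, queue it for creation
    else:
        logger.debug("{} has been added".format(file_key))
        update_list.append(file_key)


compare("index.html", b"new")   # logs "index.html has changed"
compare("about.html", b"hi")    # logs "about.html has been added"
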
