Skip to content

Commit

Permalink
better way of tracking what caches need to be uploaded; fixes #13270
Browse files · Browse the repository at this point in the history
  • Loading branch information
cvrebert committed Apr 3, 2014
1 parent 4c049e3 commit 0b6f190
Showing 1 changed file with 41 additions and 8 deletions.
49 changes: 41 additions & 8 deletions test-infra/s3_cache.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
#!/usr/bin/env python2.7
# pylint: disable=C0301
from __future__ import absolute_import, unicode_literals, print_function, division

from sys import argv
from os import environ, stat, chdir, remove as _delete_file
from os.path import isfile, dirname, basename, abspath, realpath, expandvars
from os.path import dirname, basename, abspath, realpath, expandvars
from hashlib import sha256
from subprocess import check_call as run
from json import load
from json import load, dump as save
from contextlib import contextmanager
from datetime import datetime

Expand All @@ -16,7 +17,7 @@


CONFIG_FILE = './S3Cachefile.json'
NEED_TO_UPLOAD_MARKER = '.need-to-upload'
UPLOAD_TODO_FILE = './S3CacheTodo.json'
BYTES_PER_MB = 1024 * 1024


Expand All @@ -29,6 +30,24 @@ def timer():
print("\tDone. Took", int(elapsed.total_seconds()), "second(s).")


@contextmanager
def todo_file(writeback=True):
    """Context manager yielding the upload-TODO dict from UPLOAD_TODO_FILE.

    Yields an empty dict when the file is absent or unparsable as JSON.
    When *writeback* is true, the (possibly mutated) dict is re-serialized
    to the same file on exit; a failed save is printed, never raised.
    """
    try:
        with open(UPLOAD_TODO_FILE, 'rt') as handle:
            state = load(handle)
    except (IOError, OSError, ValueError):
        # Missing or corrupt TODO file -- start from a clean slate.
        state = {}

    yield state

    if not writeback:
        return
    try:
        with open(UPLOAD_TODO_FILE, 'wt') as handle:
            save(state, handle)
    except (OSError, IOError) as save_err:
        print("Error saving {}:".format(UPLOAD_TODO_FILE), save_err)


def _sha256_of_file(filename):
hasher = sha256()
with open(filename, 'rb') as input_file:
Expand All @@ -45,6 +64,21 @@ def _delete_file_quietly(filename):
pass


def mark_needs_uploading(cache_name):
    """Record in the TODO file that *cache_name* must be (re)uploaded."""
    with todo_file() as pending:
        pending[cache_name] = True


def mark_uploaded(cache_name):
    """Remove *cache_name* from the TODO file; a no-op if it is absent."""
    with todo_file() as pending:
        if cache_name in pending:
            del pending[cache_name]


def need_to_upload(cache_name):
    """Return the TODO entry for *cache_name*, or False when not listed."""
    with todo_file(writeback=False) as pending:
        return pending[cache_name] if cache_name in pending else False


def _tarball_size(directory):
    """Return the cached tarball's size as a human-readable string.

    The byte count is integer-divided by BYTES_PER_MB (1024 * 1024), so the
    unit is mebibytes; the local was previously misnamed `kib` even though
    both the divisor and the "MiB" label show the value is MiB.
    """
    mib = stat(_tarball_filename_for(directory)).st_size // BYTES_PER_MB
    return "{} MiB".format(mib)
Expand All @@ -67,14 +101,13 @@ def _extract_tarball(directory):


def download(directory):
    # Fetch the cached tarball for *directory* from S3 and unpack it.
    # On failure, flag the cache as needing upload and exit.
    #
    # NOTE(review): this span is a diff view -- it shows both removed lines
    # (the NEED_TO_UPLOAD_MARKER file operations) and their replacements
    # (mark_uploaded / mark_needs_uploading). Only the replacement lines
    # exist after this commit; confirm against the full post-commit file.
    _delete_file_quietly(NEED_TO_UPLOAD_MARKER)  # removed by this commit
    mark_uploaded(cache_name)  # reset
    try:
        print("Downloading {} tarball from S3...".format(cache_name))
        with timer():
            # cache_name and key presumably come from enclosing/module scope
            # set up in the __main__ block -- not visible here; verify.
            key.get_contents_to_filename(_tarball_filename_for(directory))
    except S3ResponseError as err:
        open(NEED_TO_UPLOAD_MARKER, 'a').close()  # removed by this commit
        print(err)
        # A failed download means the cache is stale/absent upstream, so
        # remember that the next 'upload' run must actually upload.
        mark_needs_uploading(cache_name)
        raise SystemExit("Cached {} download failed!".format(cache_name))
    print("Downloaded {}.".format(_tarball_size(directory)))
    _extract_tarball(directory)
Expand All @@ -87,7 +120,7 @@ def upload(directory):
with timer():
key.set_contents_from_filename(_tarball_filename_for(directory))
print("{} cache successfully updated.".format(cache_name))
_delete_file_quietly(NEED_TO_UPLOAD_MARKER)
mark_uploaded(cache_name)


if __name__ == '__main__':
Expand Down Expand Up @@ -135,7 +168,7 @@ def upload(directory):
if mode == 'download':
download(directory)
elif mode == 'upload':
if isfile(NEED_TO_UPLOAD_MARKER): # FIXME
if need_to_upload(cache_name):
upload(directory)
else:
print("No need to upload anything.")
Expand Down

0 comments on commit 0b6f190

Please sign in to comment.