#!/usr/bin/python
import argparse
import os
from boto.s3.connection import S3Connection
from boto.s3.key import Key
import eventlet
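# Patch the standard library so boto's blocking socket I/O cooperates with
# eventlet greenlets instead of blocking the whole process.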
eventlet.monkey_patch()
import invar
class IVS3(invar.InvarUtility):
    description = 'Upload a directory of images to S3 concurrently.'

    def _init_common_parser(self):
        """
        Override default parser behavior.
        """
        self.argparser = argparse.ArgumentParser(description=self.description, epilog=self.epilog)
    def add_arguments(self):
        self.argparser.add_argument('upload_dir', help='Directory to be uploaded.')
        self.argparser.add_argument('bucket', help='Bucket in which to put files. You may also specify a path component (e.g. a subdirectory to put the files in).')
        self.argparser.add_argument('-c', '--concurrency', help='Number of concurrent uploads.', type=int, default=32)
        self.argparser.add_argument('-P', '--acl-public', help='Set uploaded files as public.', dest='public', action='store_true')
        self.argparser.add_argument('--acl-private', help='Set uploaded files as private (default).', dest='private', action='store_true')
        self.argparser.add_argument('--add-header', help='Add a given HTTP header to each file. Use NAME:VALUE format. May be specified multiple times.', dest='headers', action='append')
        self.argparser.add_argument('-v', '--verbose', action='store_true', help='Display detailed error messages.')
    def main(self):
        # Allow "bucket/prefix" syntax: everything after the first slash
        # becomes a key prefix for the uploaded files.
        if '/' in self.args.bucket:
            self.bucket, self.upload_path = self.args.bucket.split('/', 1)

            # Guard against an empty prefix ("bucket/"), which would
            # otherwise raise an IndexError on the [-1] lookup.
            if self.upload_path and not self.upload_path.endswith('/'):
                self.upload_path += '/'
        else:
            self.bucket = self.args.bucket
            self.upload_path = None

        # Parse NAME:VALUE headers on the first colon only, so header
        # values may themselves contain colons.
        self.headers = dict([h.split(':', 1) for h in self.args.headers]) if self.args.headers else None
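
        # GreenPile caps the number of in-flight uploads at args.concurrency;
        # spawn() schedules each upload on its own greenlet.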
        pile = eventlet.GreenPile(self.args.concurrency)

        for key_name, absolute_path in self.find_file_paths():
            pile.spawn(self.upload, key_name, absolute_path)

        # Wait for all greenlets to finish
        list(pile)
    def find_file_paths(self):
        """
        A generator function that recursively finds all files in the upload
        directory, yielding (key_name, absolute_path) pairs.
        """
        for root, dirs, files in os.walk(self.args.upload_dir):
            rel_path = os.path.relpath(root, self.args.upload_dir)

            for f in files:
                if rel_path == '.':
                    yield (f, os.path.join(root, f))
                else:
                    yield (os.path.join(rel_path, f), os.path.join(root, f))
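
    # Each call opens its own S3Connection, so concurrent greenlets never
    # share a connection object.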
    def upload(self, key_name, absolute_path):
        conn = S3Connection()
        bucket = conn.get_bucket(self.bucket)

        k = Key(bucket)
        k.key = key_name

        if self.upload_path:
            k.key = self.upload_path + k.key

        if self.args.public and not self.args.private:
            policy = 'public-read'
        else:
            policy = 'private'

        k.set_contents_from_filename(absolute_path, headers=self.headers, policy=policy)

        print 'Uploaded %s' % key_name
if __name__ == "__main__":
    ivs3 = IVS3()
    ivs3.main()
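
# Example invocation (a sketch; assumes the script is saved as "ivs3" and
# that boto can find AWS credentials, e.g. in the AWS_ACCESS_KEY_ID and
# AWS_SECRET_ACCESS_KEY environment variables; "tiles" and "my-bucket" are
# placeholder names):
#
#   ./ivs3 -P -c 16 --add-header 'Cache-Control:max-age=300' tiles my-bucket/tiles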