
Merge pull request #389 from dotcloud/0.7-Cloudfront
Optional CloudFront support
dmp42 committed May 27, 2014
2 parents 3858749 + ba9fb0c commit be5849c
Showing 2 changed files with 65 additions and 11 deletions.
66 changes: 57 additions & 9 deletions docker_registry/drivers/s3.py
@@ -15,18 +15,50 @@
from docker_registry.core import compat
from docker_registry.core import lru

# import gevent.monkey
# gevent.monkey.patch_all()

import logging
import os
import re
import time

import boto.cloudfront.distribution
import boto.exception
import boto.s3
import boto.s3.connection
import boto.s3.key

logger = logging.getLogger(__name__)


class Cloudfront():
    def __init__(self, awsaccess, awssecret, base, keyid, privatekey):
        boto.connect_cloudfront(
            awsaccess,
            awssecret
        )
        match = re.match('^https?://([^/]+)', base)
        host = match.group(1) if match else base
        self.dist = boto.cloudfront.distribution.Distribution(domain_name=host)
        self.base = base
        self.keyid = keyid
        self.privatekey = privatekey
        try:
            self.privatekey = open(privatekey).read()
        except Exception:
            logger.debug('Provided private key is not a readable file path; '
                         'assuming it is the key contents.')

    def sign(self, url, expire_time=0):
        path = os.path.join(self.base, url)
        if expire_time:
            expire_time = time.time() + expire_time
        return self.dist.create_signed_url(
            path,
            self.keyid,
            private_key_string=self.privatekey,
            expire_time=int(expire_time)
        )

    def pub(self, path):
        return os.path.join(self.base, path)
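A minimal usage sketch of this helper, not part of the commit: the credentials, distribution domain, and object path below are hypothetical, and a real CloudFront key pair is needed before create_signed_url returns a usable URL.

# Hypothetical values for illustration only.
signer = Cloudfront(
    awsaccess='AKIAEXAMPLE',                   # hypothetical AWS access key
    awssecret='example-secret',                # hypothetical AWS secret key
    base='https://d111111abcdef8.cloudfront.net',
    keyid='APKAEXAMPLE',                       # CloudFront key pair id
    privatekey='/path/to/pk-APKAEXAMPLE.pem'   # path to, or contents of, the PEM key
).sign

# Signed CloudFront URL for the object, valid for 60 seconds.
url = signer('some/object/path', expire_time=60)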


class Storage(coreboto.Base):

    def __init__(self, path, config):
@@ -48,6 +80,16 @@ def makeConnection(self):
                **kwargs)
        logger.warn("No S3 region specified, using boto default region, " +
                    "this may affect performance and stability.")
        # Use CloudFront for signing redirect URLs only when it is configured;
        # otherwise content_redirect_url signs plain S3 URLs.
        self.signer = None
        if self._config.cloudfront:
            self.signer = Cloudfront(
                self._config.s3_access_key,
                self._config.s3_secret_key,
                self._config.cloudfront['base'],
                self._config.cloudfront['keyid'],
                self._config.cloudfront['keysecret']
            ).sign

        return boto.s3.connection.S3Connection(
            self._config.s3_access_key,
            self._config.s3_secret_key,
@@ -82,16 +124,22 @@ def stream_write(self, path, fp):
                mp.upload_part_from_file(io, num_part)
                num_part += 1
                io.close()
        except IOError:
            pass
        except IOError as e:
            raise e
        mp.complete_upload()

    def content_redirect_url(self, path):
        path = self._init_path(path)
        key = self.makeKey(path)
        if not key.exists():
            raise IOError('No such key: \'{0}\''.format(path))
        return key.generate_url(
            expires_in=1200,
            method='GET',
            query_auth=True)

        # No cloudfront? Sign to the bucket
        if not self.signer:
            return key.generate_url(
                expires_in=1200,
                method='GET',
                query_auth=True)

        # Have cloudfront? Sign it
        return self.signer(path, expire_time=60)
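For context, a sketch of the configuration these changes read; this is an assumption based on the keys accessed in makeConnection, not shown in the diff. The cloudfront block sits alongside the S3 credentials in the registry config, and keysecret, despite its name, holds the CloudFront private key as either a path to a .pem file or the PEM contents.

# Hypothetical registry configuration (YAML), matching the keys read above:
#
#   s3_access_key: AKIAEXAMPLE
#   s3_secret_key: example-secret
#   cloudfront:
#       base: https://d111111abcdef8.cloudfront.net
#       keyid: APKAEXAMPLE
#       keysecret: /path/to/pk-APKAEXAMPLE.pem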
10 changes: 8 additions & 2 deletions tests/test_s3.py
@@ -60,10 +60,16 @@ def test_stream_write(self):
        content = self.gen_random_string(8 * 1024 * 1024)
        io = StringIOWithError(content)
        assert not self._storage.exists(filename)
        self._storage.stream_write(filename, io)
        try:
            self._storage.stream_write(filename, io)
        except IOError:
            pass
        assert self._storage.exists(filename)
        # Test that EOFed io string throws IOError on lib/storage/s3
        self._storage.stream_write(filename, io)
        try:
            self._storage.stream_write(filename, io)
        except IOError:
            pass
        # Cleanup
        io.close()
        self._storage.remove(filename)
