Skip to content

Commit

Permalink
Implemented a boto2 friendly fallback for AWS host settings. For #101.
Browse files Browse the repository at this point in the history
  • Loading branch information
palewire committed Mar 31, 2017
1 parent 59c9aef commit b8f6a8e
Show file tree
Hide file tree
Showing 5 changed files with 41 additions and 12 deletions.
13 changes: 11 additions & 2 deletions bakery/management/commands/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,17 @@ def get_s3_client():
boto3.setup_default_session(**session_kwargs)

s3_kwargs = {}
if hasattr(settings, 'AWS_S3_HOST'):
s3_kwargs['endpoint_url'] = settings.AWS_S3_HOST
if hasattr(settings, 'AWS_S3_ENDPOINT'):
s3_kwargs['endpoint_url'] = settings.AWS_S3_ENDPOINT
elif hasattr(settings, 'AWS_S3_HOST'):
if hasattr(settings, 'AWS_S3_USE_SSL') and settings.AWS_S3_USE_SSL is False:
protocol = "http://"
else:
protocol = "https://"
s3_kwargs['endpoint_url'] = "{}{}".format(
protocol,
settings.AWS_S3_HOST
)
s3_client = boto3.client('s3', **s3_kwargs)
s3_resource = boto3.resource('s3', **s3_kwargs)
return s3_client, s3_resource
Expand Down
6 changes: 4 additions & 2 deletions bakery/management/commands/publish.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,11 +127,13 @@ def handle(self, *args, **options):
# We're finished, print the final output
elapsed_time = time.time() - self.start_time
if self.verbosity > 0:
logger.info("publish completed, %d uploaded and %d deleted files in %.2f seconds" % (
msg = "publish completed, %d uploaded and %d deleted files in %.2f seconds" % (
self.uploaded_files,
self.deleted_files,
elapsed_time
))
)
self.stdout.write(msg)
logger.info(msg)

if self.verbosity > 2:
for f in self.uploaded_file_list:
Expand Down
14 changes: 13 additions & 1 deletion bakery/tests/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -463,8 +463,20 @@ def test_get_s3_client_handles_no_settings_gracefully(self):
del settings.AWS_SECRET_ACCESS_KEY
get_s3_client()

@override_settings(AWS_S3_HOST='http://example.com')
@override_settings(AWS_S3_ENDPOINT="https://example.com", AWS_S3_HOST='foobar.com')
def test_aws_s3_endpoint_can_be_set(self):
    """AWS_S3_ENDPOINT should take precedence over AWS_S3_HOST when both are set."""
    expected_url = 'https://example.com'
    client, resource = get_s3_client()
    self.assertEqual(client.meta.endpoint_url, expected_url)
    self.assertEqual(resource.meta.client._endpoint.host, expected_url)

@override_settings(AWS_S3_HOST='example.com')
def test_aws_s3_host_can_be_set(self):
    """A bare AWS_S3_HOST (boto2-style) should become an https:// endpoint URL."""
    expected_url = 'https://example.com'
    client, resource = get_s3_client()
    self.assertEqual(client.meta.endpoint_url, expected_url)
    self.assertEqual(resource.meta.client._endpoint.host, expected_url)

@override_settings(AWS_S3_HOST='example.com', AWS_S3_USE_SSL=False)
def test_aws_s3_http_host_can_be_set(self):
    """AWS_S3_USE_SSL=False should downgrade the derived endpoint to http://."""
    expected_url = 'http://example.com'
    client, resource = get_s3_client()
    self.assertEqual(client.meta.endpoint_url, expected_url)
    self.assertEqual(resource.meta.client._endpoint.host, expected_url)
Expand Down
14 changes: 7 additions & 7 deletions docs/settingsvariables.rst
Original file line number Diff line number Diff line change
Expand Up @@ -81,20 +81,20 @@ AWS_SECRET_ACCESS_KEY
AWS_SECRET_ACCESS_KEY = 'your-secret-key'
AWS_S3_HOST
AWS_S3_ENDPOINT
-----------

.. envvar:: AWS_S3_HOST
.. envvar:: AWS_S3_ENDPOINT

The hostname to use when connecting with Amazon Web Service's S3 system. If the
The URL to use when connecting with Amazon Web Service's S3 system. If the
setting is not provided the boto package's default is used.

.. code-block:: python
# Substitute in Amazon's accelerated upload service
AWS_S3_HOST = 's3-accelerate.amazonaws.com'
AWS_S3_ENDPOINT = 'https://s3-accelerate.amazonaws.com'
# Specify the region of the bucket to work around bugs with S3 in certain versions of boto
AWS_S3_HOST = 's3-%s.amazonaws.com' % AWS_REGION
AWS_S3_ENDPOINT = 'https://s3-%s.amazonaws.com' % AWS_REGION
BAKERY_GZIP
-----------
Expand All @@ -115,9 +115,9 @@ GZIP_CONTENT_TYPES

A list of file mime types used to determine which files to add the
'Content-Encoding: gzip' metadata header when syncing to Amazon S3.

Defaults to include all 'text/css', 'text/html', 'application/javascript',
'application/x-javascript' and everything else recommended by the HTML5
'application/x-javascript' and everything else recommended by the HTML5
`boilerplate guide <https://github.com/h5bp/server-configs-apache>`_.

Only matters if you have set ``BAKERY_GZIP`` to ``True``.
Expand Down
6 changes: 6 additions & 0 deletions example/project/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,3 +131,9 @@
)
BUILD_DIR = os.path.join(BASE_DIR, '.build/')
STATIC_ROOT = os.path.join(BASE_DIR, '.static/')

# AWS configuration for the example project.
#
# SECURITY: never commit real AWS credentials to source control — the
# original version of this block contained a live-looking access key and
# secret key. Supply real values via environment variables instead; the
# defaults below are inert placeholders.
AWS_BUCKET_NAME = os.environ.get('AWS_BUCKET_NAME', 'example-bucket')
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID', '')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY', '')
AWS_REGION = os.environ.get('AWS_REGION', 'us-west-1')
# boto2-style host setting; get_s3_client() converts this to an endpoint URL.
AWS_S3_HOST = 's3-%s.amazonaws.com' % AWS_REGION

0 comments on commit b8f6a8e

Please sign in to comment.