Skip to content

Commit

Permalink
Merge pull request #98 from ZuluPro/s3
Browse files Browse the repository at this point in the history
Remade the S3 storage to use the django-storages-redux backend
  • Loading branch information
ZuluPro committed Sep 1, 2015
2 parents 13df98d + 26be5f5 commit cd32528
Show file tree
Hide file tree
Showing 8 changed files with 297 additions and 139 deletions.
1 change: 1 addition & 0 deletions .coveragerc
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ omit =
exclude_lines =
# Have to re-enable the standard pragma
pragma: no cover
noqa:

# Don't complain about missing debug-only code:
def __repr__
Expand Down
24 changes: 23 additions & 1 deletion dbbackup/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,11 +66,33 @@
BUILTIN_STORAGE = getattr(settings, 'DBBACKUP_BUILTIN_STORAGE', None)
STORAGE_OPTIONS = getattr(settings, 'DBBACKUP_STORAGE_OPTIONS', {})

# Deprecation shims: honour the old single-purpose settings, but copy them
# into the new locations and warn.  (Duplicated `if` header from the diff
# overlap removed.)
if hasattr(settings, 'DBBACKUP_BACKUP_DIRECTORY'):  # pragma: no cover
    # Old location setting maps onto the storage backend's 'location' option.
    BACKUP_DIRECTORY = STORAGE_OPTIONS['location'] = \
        getattr(settings, 'DBBACKUP_BACKUP_DIRECTORY', os.getcwd())
    warnings.warn("DBBACKUP_BACKUP_DIRECTORY is deprecated, use DBBACKUP_STORAGE_OPTIONS['location']", DeprecationWarning)

if hasattr(settings, 'DBBACKUP_FAKE_HOST'):  # noqa
    warnings.warn("DBBACKUP_FAKE_HOST is deprecated, use DBBACKUP_HOSTNAME", DeprecationWarning)
    HOSTNAME = settings.DBBACKUP_FAKE_HOST

# Old DBBACKUP_S3_* settings that no longer have any effect.
UNSED_AWS_SETTINGS = ('DIRECTORY',)
# (old DBBACKUP_S3_ suffix, new DBBACKUP_STORAGE_OPTIONS key) pairs.
DEPRECATED_AWS_SETTINGS = (
    ('BUCKET', 'bucket_name'),
    ('ACCESS_KEY', 'access_key'),
    ('SECRET_KEY', 'secret_key'),
    ('DOMAIN', 'host'),
    ('IS_SECURE', 'use_ssl'),
    ('SERVER_SIDE_ENCRYPTION', 'encryption'),
)
if hasattr(settings, 'DBBACKUP_S3_BUCKET'):  # pragma: no cover
    for old_suffix, new_key in DEPRECATED_AWS_SETTINGS:
        old_setting = 'DBBACKUP_S3_%s' % old_suffix
        if hasattr(settings, old_setting):
            # BUG FIX: read the fully-prefixed setting name.  The original
            # did getattr(settings, old_suffix) — e.g. settings.BUCKET —
            # which never exists, so the deprecated value was never copied.
            STORAGE_OPTIONS[new_key] = getattr(settings, old_setting)
            msg = "%s is deprecated, use DBBACKUP_STORAGE_OPTIONS['%s']" % (old_setting, new_key)
            warnings.warn(msg, DeprecationWarning)
    for old_suffix in UNSED_AWS_SETTINGS:
        if hasattr(settings, 'DBBACKUP_S3_%s' % old_suffix):
            msg = "DBBACKUP_S3_%s is now useless" % old_suffix
            warnings.warn(msg, DeprecationWarning)
    # Keep the module namespace clean of loop temporaries.
    del old_suffix, new_key
2 changes: 1 addition & 1 deletion dbbackup/storage/builtin_django.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ def delete_file(self, filepath):
self.storage.delete(name=filepath)

def list_directory(self):
    """Return the file names at the storage root.

    ``listdir('')`` yields a ``(directories, files)`` pair per the Django
    storage API; ``[1]`` selects the files.  (Unreachable duplicate return
    line left over from the diff removed; the keyword form ``path=''`` was
    dropped because not all backends name the parameter.)
    """
    return self.storage.listdir('')[1]

def write_file(self, filehandle, filename):
    """Persist ``filehandle`` under ``filename`` via the Django storage backend."""
    self.storage.save(content=filehandle, name=filename)
Expand Down
91 changes: 18 additions & 73 deletions dbbackup/storage/s3_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,80 +3,25 @@
"""
from __future__ import (absolute_import, division,
print_function, unicode_literals)
import os
from boto.s3.key import Key
from boto.s3.connection import S3Connection
from io import BytesIO
from django.conf import settings
from tempfile import SpooledTemporaryFile
from .base import BaseStorage, StorageError
from .base import StorageError
from .builtin_django import Storage as DjangoStorage

STORAGE_PATH = 'storages.backends.s3boto.S3BotoStorage'

class Storage(BaseStorage):
""" S3 API Storage. """
S3_BUCKET = getattr(settings, 'DBBACKUP_S3_BUCKET', None)
S3_ACCESS_KEY = getattr(settings, 'DBBACKUP_S3_ACCESS_KEY', None)
S3_SECRET_KEY = getattr(settings, 'DBBACKUP_S3_SECRET_KEY', None)
S3_DOMAIN = getattr(settings, 'DBBACKUP_S3_DOMAIN', 's3.amazonaws.com')
S3_IS_SECURE = getattr(settings, 'DBBACKUP_S3_USE_SSL', True)
S3_DIRECTORY = getattr(settings, 'DBBACKUP_S3_DIRECTORY', "django-dbbackups/")
S3_SERVER_SIDE_ENCRYPTION = getattr(settings, 'DBBACKUP_S3_SERVER_SIDE_ENCRYPTION', False)
if S3_DIRECTORY:
S3_DIRECTORY = '%s/' % S3_DIRECTORY.strip('/')

def __init__(self, server_name=None):
self._check_filesystem_errors()
class Storage(DjangoStorage):
"""Filesystem API Storage."""
def __init__(self, server_name=None, **options):
self.name = 'AmazonS3'
self.conn = S3Connection(aws_access_key_id=self.S3_ACCESS_KEY,
aws_secret_access_key=self.S3_SECRET_KEY, host=self.S3_DOMAIN,
is_secure=self.S3_IS_SECURE)
self.bucket = self.conn.get_bucket(self.S3_BUCKET)
BaseStorage.__init__(self)

def _check_filesystem_errors(self):
if not self.S3_BUCKET:
raise StorageError('Filesystem storage requires DBBACKUP_S3_BUCKET to be defined in settings.')
if not self.S3_ACCESS_KEY:
raise StorageError('Filesystem storage requires DBBACKUP_S3_ACCESS_KEY to be defined in settings.')
if not self.S3_SECRET_KEY:
raise StorageError('Filesystem storage requires DBBACKUP_S3_SECRET_KEY to be defined in settings.')

@property
def backup_dir(self):
return self.S3_DIRECTORY

def delete_file(self, filepath):
self.bucket.delete_key(filepath)

def list_directory(self):
return [k.name for k in self.bucket.list(prefix=self.S3_DIRECTORY)]

def write_file(self, filehandle, filename):
# Use multipart upload because normal upload maximum is 5 GB.
filepath = os.path.join(self.S3_DIRECTORY, filename)
filehandle.seek(0)
handle = self.bucket.initiate_multipart_upload(filepath,
encrypt_key=self.S3_SERVER_SIDE_ENCRYPTION)
try:
chunk = 1
while True:
chunkdata = filehandle.read(5 * 1024 * 1024)
if not chunkdata:
break
tmpfile = BytesIO(chunkdata)
tmpfile.seek(0)
handle.upload_part_from_file(tmpfile, chunk)
tmpfile.close()
chunk += 1
handle.complete_upload()
except Exception:
handle.cancel_upload()
raise

def read_file(self, filepath):
""" Read the specified file and return it's handle. """
key = Key(self.bucket)
key.key = filepath
filehandle = SpooledTemporaryFile(max_size=10 * 1024 * 1024)
key.get_contents_to_file(filehandle)
return filehandle
self._check_filesystem_errors(options)
super(Storage, self).__init__(storage_path=STORAGE_PATH,
bucket=options['bucket_name'],
**options)

def _check_filesystem_errors(self, options):
    """Validate that every mandatory storage option is present.

    :param options: dict of DBBACKUP_STORAGE_OPTIONS values.
    :raises StorageError: if 'bucket_name', 'access_key' or 'secret_key'
        is missing (only key presence is checked, not the value).
    """
    required_args = ('bucket_name', 'access_key', 'secret_key')
    # Typo fixed in the user-facing message: "define" -> "defined".
    err_msg = "%s storage requires settings.DBBACKUP_STORAGE_OPTIONS['%s'] to be defined"
    for arg in required_args:
        if arg not in options:
            raise StorageError(err_msg % (self.name, arg))
62 changes: 62 additions & 0 deletions dbbackup/tests/storages/test_s3.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
from io import BytesIO
from django.test import TestCase
import boto
try:
from moto import mock_s3
except SyntaxError:
mock_s3 = None
from dbbackup.storage.s3_storage import Storage as S3Storage
from dbbackup.storage.base import StorageError
from dbbackup.tests.utils import skip_py3


# Python 3.2 fix: moto cannot be imported there (SyntaxError above), so
# substitute a no-op decorator that returns the test case unchanged.
if mock_s3 is None:
    def mock_s3(decorated):
        return decorated


@mock_s3
@skip_py3
class S3StorageTest(TestCase):
    """Exercise the S3 storage wrapper against a moto-mocked bucket."""

    def setUp(self):
        self.storage = S3Storage(bucket_name='foo_bucket',
                                 access_key='foo_id',
                                 secret_key='foo_secret')
        # Fixtures: one bucket holding a single key 'foo_file' -> 'bar'.
        self.conn = boto.connect_s3()
        self.bucket = self.conn.create_bucket('foo_bucket')
        fixture_key = boto.s3.key.Key(self.bucket)
        fixture_key.key = 'foo_file'
        fixture_key.set_contents_from_string('bar')

    def test_delete_file(self):
        self.storage.delete_file('foo_file')
        self.assertEqual(len(self.bucket.get_all_keys()), 0)

    def test_list_directory(self):
        listed = self.storage.list_directory()
        self.assertEqual(1, len(listed))

    def test_write_file(self):
        self.storage.write_file(BytesIO(b'bar'), 'foo')
        self.assertEqual(len(self.bucket.get_all_keys()), 2)
        written = self.bucket.get_key('foo')
        self.assertEqual(written.get_contents_as_string(), 'bar')

    def test_read_file(self):
        handle = self.storage.read_file('foo_file')
        self.assertEqual(b'bar', handle.read())

    def test_check(self):
        # Any one of the three required keys missing must raise.
        incomplete = (
            {'bucket_name': '', 'access_key': ''},
            {'bucket_name': '', 'secret_key': ''},
            {'access_key': '', 'secret_key': ''},
        )
        for options in incomplete:
            with self.assertRaises(StorageError):
                self.storage._check_filesystem_errors(options)
        # All three present (values may be empty): must not raise.
        self.storage._check_filesystem_errors({
            'bucket_name': '', 'access_key': '', 'secret_key': ''})
29 changes: 19 additions & 10 deletions dbbackup/tests/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
except ImportError:
from io import StringIO
from django.conf import settings
from django.utils import six
from dbbackup.storage.base import BaseStorage

BASE_FILE = os.path.join(settings.BASE_DIR, 'tests/test.txt')
Expand Down Expand Up @@ -61,13 +62,21 @@ def read_file(self, filepath):


def clean_gpg_keys():
    """Best-effort removal of the test GPG key pair identified by
    ``GPG_FINGERPRINT``.

    Failures are deliberately swallowed: the keys may simply not exist.
    Fixes versus the original (whose duplicated diff body is also collapsed
    to one copy):

    - ``--delete-secrect-key`` typo corrected to ``--delete-secret-key``,
      so the secret key is actually removed.
    - The command is passed as an argv list.  The original passed one long
      string without ``shell=True``, which makes ``subprocess`` look for an
      executable literally named ``"gpg --batch ..."`` and always fail
      (silently, because of the except).
    """
    try:
        subprocess.call(['gpg', '--batch', '--yes', '--delete-key',
                         GPG_FINGERPRINT],
                        stdout=DEV_NULL, stderr=DEV_NULL)
    except Exception:
        pass
    try:
        subprocess.call(['gpg', '--batch', '--yes', '--delete-secret-key',
                         GPG_FINGERPRINT],
                        stdout=DEV_NULL, stderr=DEV_NULL)
    except Exception:
        pass


def skip_py3(testcase, reason="Not in Python 3"):
    """Class decorator that skips ``testcase`` when running under Python 3.

    Skipping is done by replacing ``setUp`` with a method that immediately
    calls ``skipTest(reason)``.
    """
    if six.PY3:
        def _skip(self):
            self.skipTest(reason)
        testcase.setUp = _skip
    return testcase

0 comments on commit cd32528

Please sign in to comment.