Skip to content

Commit

Permalink
Merge branch 'hotfix/boto_db_upload' into develop
Browse files Browse the repository at this point in the history
  • Loading branch information
Greg Taylor committed Jan 27, 2011
2 parents c9989fd + bc2fdd8 commit a801861
Show file tree
Hide file tree
Showing 3 changed files with 39 additions and 21 deletions.
24 changes: 15 additions & 9 deletions examples/fabfile.py
Expand Up @@ -2,8 +2,9 @@
import os

from django.core import management
import settings
management.setup_environ(settings)
# We have to re-name this to avoid clashes with fabric.api.settings.
import settings as django_settings
management.setup_environ(django_settings)

from fabric.api import *
# This will import every command, you may need to get more selective if
Expand All @@ -29,17 +30,22 @@
# This is used for reloading gunicorn processes after code updates.
# Only needed for gunicorn-related tasks.
env.GUNICORN_PID_PATH = os.path.join(env.REMOTE_CODEBASE_PATH, 'gunicorn.pid')
# S3 bucket for s3cmd to upload DB backups to.
# Only needed for backup_db_to_s3
env.S3_DB_BACKUP_BUCKET = 'db_backups'

# Only needed for those using the ft_backup_db_to_s3 management command.
# Options for configuring the S3 DB backups.
env.S3_DB_BACKUP = {
'BUCKET': 'your_db_backup_bucket',
'AWS_ACCESS_KEY_ID': django_settings.AWS_ACCESS_KEY_ID,
'AWS_SECRET_ACCESS_KEY': django_settings.AWS_SECRET_ACCESS_KEY,
}

def staging():
    """
    Point env.hosts at the single staging server.

    Because no env.roledefs are defined here, every deployment task is
    run against each host in env.hosts.
    """
    env.hosts = ['staging.example.org']

def prod():
"""
Set env.roledefs according to our deployment setup. From this, an
Expand All @@ -54,10 +60,10 @@ def prod():
env.roledefs['media_servers'] = ['media1.example.org']
# Postgres servers.
env.roledefs['db_servers'] = ['db1.example.org']

# Combine all of the roles into the env.hosts list.
env.hosts = [host[0] for host in env.roledefs.values()]

def deploy():
"""
Full git deployment. Migrations, reloading gunicorn.
Expand All @@ -74,4 +80,4 @@ def deploy_soft():
Just checkout the latest source, don't reload.
"""
git_pull()
print("--- Soft Deployment complete. ---")
print("--- Soft Deployment complete. ---")
23 changes: 11 additions & 12 deletions fabtastic/management/commands/ft_backup_db_to_s3.py
@@ -1,12 +1,10 @@
import os
from subprocess import call

from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.core.management.base import BaseCommand

from fabric.api import *
from fabtastic import db
import fabfile
from fabtastic.util.aws import get_s3_connection

class Command(BaseCommand):
help = 'Backs the DB up to S3. Make sure to run s3cmd --configure.'
Expand All @@ -17,13 +15,14 @@ def handle(self, *args, **options):
# Generate a temporary DB dump filename.
dump_filename = db.util.get_db_dump_filename()
# Carry out the DB dump.
db.dump_db_to_file(dump_filename, database)

# Now upload via s3cmd. See note above about s3cmd --configure.
cmd = ['s3cmd', 'put']
cmd.append(dump_filename)
cmd.append('s3://%s/%s' % (env.S3_DB_BACKUP_BUCKET, dump_filename))
call(cmd)

dump_file_path = db.dump_db_to_file(dump_filename, database)

print "Uploading to S3."
conn = get_s3_connection()
bucket = conn.create_bucket(env.S3_DB_BACKUP['BUCKET'])
key = bucket.new_key(dump_filename)
key.set_contents_from_filename(dump_file_path)
print "S3 DB backup complete."

# Clean up the temporary download file.
os.remove(dump_filename)
13 changes: 13 additions & 0 deletions fabtastic/util/aws.py
@@ -0,0 +1,13 @@
"""
Amazon AWS-related utils.
"""
import boto
from fabric.api import *

def get_s3_connection():
    """
    Build and return a boto S3 connection.

    Credentials are pulled from fabfile's env.S3_DB_BACKUP dict
    (keys: 'AWS_ACCESS_KEY_ID' and 'AWS_SECRET_ACCESS_KEY').
    """
    creds = env.S3_DB_BACKUP
    access_key = creds['AWS_ACCESS_KEY_ID']
    secret_key = creds['AWS_SECRET_ACCESS_KEY']
    return boto.connect_s3(access_key, secret_key)

0 comments on commit a801861

Please sign in to comment.