Use pg_dumpall instead of pg_dump
miohtama committed Feb 13, 2015
1 parent abf76a7 commit 463b580
Showing 2 changed files with 15 additions and 19 deletions.
32 changes: 14 additions & 18 deletions bin/incremental-backup.bash
@@ -1,9 +1,10 @@
#!/bin/bash
#
# Backup site SQL + media files to S3
# Backup all site SQL database + media files to S3 bucket (US region)
#
# http://www.janoszen.com/2013/10/14/backing-up-linux-servers-with-duplicity-and-amazon-aws/
# http://docs.aws.amazon.com/AmazonS3/latest/dev/WebsiteEndpoints.html
# Usage:
#
# bin/incremental-backup.bash AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY BACKUP_ENCRYPTION_KEY
#
# Installation in Python 2.7 virtualenv
#
@@ -13,30 +14,28 @@
# pip install https://launchpad.net/duplicity/0.7-series/0.7.01/+download/duplicity-0.7.01.tar.gz
# pip install boto
#
# Initialize SSL certificate database for Duplicity (Mozilla's copy):
#
# mkdir /etc/duplicity
# curl http://curl.haxx.se/ca/cacert.pem > /etc/duplicity/cacert.pem
#
# bin/incremental-backup.bash
#
# Note: The user running this script must have sudo -u postgres access to run pg_dumpall
#
# Note: This script is safe to run only on a server where you have 100% control and there are no other UNIX users who could see the process command line or environment
#
# Note: Do **not** use AWS Frankfurt region - it uses an unsupported authentication scheme - https://github.com/s3tools/s3cmd/issues/402
#
# # s3-us-west-2.amazonaws.com/liberty-backup3/liberty-backup
# Further reading:
#
# http://www.janoszen.com/2013/10/14/backing-up-linux-servers-with-duplicity-and-amazon-aws/
# http://docs.aws.amazon.com/AmazonS3/latest/dev/WebsiteEndpoints.html


set -e

# Assume we are /srv/django/mysite
PWD=`pwd`
SITENAME=`basename $PWD`

# Need to access:
# Use duplicity + boto installed in specific Python 2.7 virtualenv
source duplicity-venv/bin/activate

# Our S3 bucket where we drop files
DUPLICITY_TARGET=s3://s3-us-west-2.amazonaws.com/liberty-backup3/$SITENAME

# Tell credentials to Boto
@@ -50,13 +49,10 @@ if [ -z "$BACKUP_ENCRYPTION_KEY" ]; then
fi

# Create daily dump of the database
sudo -u postgres pg_dump tatianastore_production | bzip2 | gpg --batch --symmetric --passphrase $BACKUP_ENCRYPTION_KEY > backups/$SITENAME-dump-$(date -d "today" +"%Y%m%d").sql.bzip2.gpg

# http://duplicity.nongnu.org/duplicity.1.html
# Incrementally backup all files, inc. just generated SQL dump, media files and source code.
# Our media files are not sensitive, so those are not encrypted.
#duplicity -v9 --ssl-no-check-certificate --s3-use-new-style --s3-european-buckets --s3-use-rrs --s3-use-multiprocessing --exclude=`pwd`/logs --exclude=`pwd`/.git --exclude=`pwd`/venv --exclude=`pwd`/duplicity-venv --no-encryption --full-if-older-than 1M `pwd` $DUPLICITY_TARGET
sudo -u postgres pg_dumpall | bzip2 | gpg --batch --symmetric --passphrase $BACKUP_ENCRYPTION_KEY > backups/$SITENAME-dump-$(date -d "today" +"%Y%m%d").sql.bzip2.gpg

# Use cheap RRS S3 storage, exclude some stuff we know is not important.
# Also we do not need to encrypt media files as in our use case they are not sensitive; the SQL dump is encrypted separately.
duplicity --s3-use-rrs --exclude=`pwd`/logs --exclude=`pwd`/.git --exclude=`pwd`/venv --exclude=`pwd`/duplicity-venv --no-encryption --full-if-older-than 1M `pwd` $DUPLICITY_TARGET
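For later recovery, files can be pulled back out of the same S3 target with duplicity's restore mode. A minimal sketch, assuming the same virtualenv and credentials as above; the target directory is illustrative and the bucket path uses the hypothetical site name "mysite" from the comment earlier in the script:

source duplicity-venv/bin/activate
export AWS_ACCESS_KEY_ID="<backup access key>"        # same credentials the backup run used
export AWS_SECRET_ACCESS_KEY="<backup secret key>"
# Pull back only the backups/ subdirectory from the most recent backup set
duplicity restore --file-to-restore backups s3://s3-us-west-2.amazonaws.com/liberty-backup3/mysite /tmp/restored-backups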


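Restoring the daily SQL dump is the reverse of the pipeline above: decrypt with the same symmetric passphrase, decompress, and feed the pg_dumpall output to psql. A minimal sketch with a hypothetical dump file name:

# Decrypt, decompress and load the dump back into the PostgreSQL cluster
gpg --batch --decrypt --passphrase "$BACKUP_ENCRYPTION_KEY" backups/mysite-dump-20150213.sql.bzip2.gpg \
  | bunzip2 \
  | sudo -u postgres psql postgres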
2 changes: 1 addition & 1 deletion tatianastore/tasks.py
@@ -73,7 +73,7 @@ def backup_site():
echo "from tatianastore import tasks ; tasks.backup_site()"|python manage.py shell
"""
try:
subprocess.check_output(["bin/incremental-backup.bash", settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY, settings.BACKUP_ENCRYPTION_KEY], timeout=4*60*60, stderr=subprocess.PIPE)
subprocess.check_output(["bin/incremental-backup.bash", settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY, settings.BACKUP_ENCRYPTION_KEY, timeout=4*60*60, stderr=subprocess.PIPE)
except subprocess.CalledProcessError as e:
# Capture error in Sentry
logger.error(e.output)
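backup_site() just shells out to the script with the three positional arguments described in its Usage comment, so the same backup can also be run by hand. A minimal sketch with placeholder values, assuming the site lives under /srv/django/mysite as the script expects and that a backups/ directory exists there:

cd /srv/django/mysite
mkdir -p backups    # the encrypted SQL dump is written here before upload
bin/incremental-backup.bash AKIAEXAMPLEKEYID examplesecretaccesskey example-backup-passphrase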
