24 changes: 15 additions & 9 deletions 10/backup.py
@@ -9,11 +9,13 @@

 BACKUP_DIR = os.environ["BACKUP_DIR"]

-S3_PATH = os.environ["S3_PATH"]
-S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS") or "STANDARD_IA"
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
+AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
+S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")

-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")

 if not DB_NAME:
@@ -23,17 +25,17 @@
 DB_HOST = os.environ["DB_HOST"]
 DB_PASS = os.environ["DB_PASS"]
 DB_USER = os.environ["DB_USER"]
-DB_PORT = os.environ.get("DB_PORT") or "5432"
+DB_PORT = os.environ.get("DB_PORT", "5432")

 MAIL_TO = os.environ.get("MAIL_TO")
 MAIL_FROM = os.environ.get("MAIL_FROM")
 WEBHOOK = os.environ.get("WEBHOOK")
 WEBHOOK_METHOD = os.environ.get("WEBHOOK_METHOD")
 WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA")
-WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS") or ""
+WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "")
 KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7))
 FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d")
-PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS") or ""
+PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "")

 file_name = dt.strftime(FILENAME)
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -112,8 +114,12 @@ def main():
     take_backup()
     backup_size=os.path.getsize(backup_file)

-    log("Uploading to S3")
-    upload_backup()
+    if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
+        log("Uploading to S3")
+        upload_backup()
+    else:
+        log("Skipping S3 upload, no AWS credentials provided")
+
     log("Pruning local backup copies")
     prune_local_backup_files()
     end_time = datetime.now()
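Note: swapping `os.environ.get(X) or default` for `os.environ.get(X, default)` throughout is a behavioral change, not just a style fix. The `or` form falls back to the default whenever the variable is unset or set to an empty string; the two-argument `get` form falls back only when the key is absent. A minimal sketch of the difference (the exported empty value here is hypothetical, chosen only to illustrate):

    import os

    # Hypothetical: an operator explicitly exports an empty storage class.
    os.environ["S3_STORAGE_CLASS"] = ""

    # Old pattern: "" is falsy, so the default silently wins.
    old_style = os.environ.get("S3_STORAGE_CLASS") or "STANDARD_IA"

    # New pattern: the key exists, so its empty value is kept.
    new_style = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA")

    print(repr(old_style))  # 'STANDARD_IA'
    print(repr(new_style))  # ''

In practice an explicitly empty value now flows through instead of being silently replaced, which is the more predictable behavior for options such as S3_EXTRA_OPTIONS and PG_DUMP_EXTRA_OPTIONS that are later interpolated into a command line.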
14 changes: 7 additions & 7 deletions 10/restore.py
@@ -7,10 +7,10 @@

 BACKUP_DIR = os.environ["BACKUP_DIR"]

-S3_PATH = os.environ["S3_PATH"]
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")

-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")

 if not DB_NAME:
@@ -20,7 +20,7 @@
 DB_HOST = os.environ["DB_HOST"]
 DB_PASS = os.environ["DB_PASS"]
 DB_USER = os.environ["DB_USER"]
-DB_PORT = os.environ.get("DB_PORT") or "5432"
+DB_PORT = os.environ.get("DB_PORT", "5432")

 file_name = sys.argv[1]
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -48,7 +48,7 @@ def restore_backup():
     if not backup_exists():
         sys.stderr.write("Backup file doesn't exists!\n")
         sys.exit(1)

     # restore postgres-backup
     env = os.environ.copy()
     if DB_USE_ENV:
@@ -71,10 +71,10 @@ def main():
     else:
         log("Downloading database dump")
         download_backup()

     log("Restoring database")
     restore_backup()

     log("Restore complete, took %.2f seconds" % (datetime.now() - start_time).total_seconds())

 if __name__ == "__main__":
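Taken together, the two files make S3 strictly optional: S3_PATH no longer hard-fails at startup in either script, and backup.py only attempts an upload when both AWS credentials are present. A self-contained sketch of the new gate, assuming stand-in log() and upload_backup() helpers in place of the scripts' real ones:

    import os

    AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
    AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")

    def log(msg):
        # Stand-in for the scripts' own log() helper.
        print(msg)

    def upload_backup():
        # Stand-in for the real S3 upload step.
        print("(would upload to S3 here)")

    # Mirrors the gate added to backup.py's main().
    if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
        log("Uploading to S3")
        upload_backup()
    else:
        log("Skipping S3 upload, no AWS credentials provided")

Since os.environ.get() returns None for a missing key and empty strings are falsy, a blank credential disables the upload just like an absent one, and the dump still lands in BACKUP_DIR for local pruning.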
24 changes: 15 additions & 9 deletions 11/backup.py
@@ -9,11 +9,13 @@

 BACKUP_DIR = os.environ["BACKUP_DIR"]

-S3_PATH = os.environ["S3_PATH"]
-S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS") or "STANDARD_IA"
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
+AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
+S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")

-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")

 if not DB_NAME:
@@ -23,17 +25,17 @@
 DB_HOST = os.environ["DB_HOST"]
 DB_PASS = os.environ["DB_PASS"]
 DB_USER = os.environ["DB_USER"]
-DB_PORT = os.environ.get("DB_PORT") or "5432"
+DB_PORT = os.environ.get("DB_PORT", "5432")

 MAIL_TO = os.environ.get("MAIL_TO")
 MAIL_FROM = os.environ.get("MAIL_FROM")
 WEBHOOK = os.environ.get("WEBHOOK")
 WEBHOOK_METHOD = os.environ.get("WEBHOOK_METHOD")
 WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA")
-WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS") or ""
+WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "")
 KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7))
 FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d")
-PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS") or ""
+PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "")

 file_name = dt.strftime(FILENAME)
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -112,8 +114,12 @@ def main():
     take_backup()
     backup_size=os.path.getsize(backup_file)

-    log("Uploading to S3")
-    upload_backup()
+    if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
+        log("Uploading to S3")
+        upload_backup()
+    else:
+        log("Skipping S3 upload, no AWS credentials provided")
+
     log("Pruning local backup copies")
     prune_local_backup_files()
     end_time = datetime.now()
14 changes: 7 additions & 7 deletions 11/restore.py
@@ -7,10 +7,10 @@

 BACKUP_DIR = os.environ["BACKUP_DIR"]

-S3_PATH = os.environ["S3_PATH"]
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")

-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")

 if not DB_NAME:
@@ -20,7 +20,7 @@
 DB_HOST = os.environ["DB_HOST"]
 DB_PASS = os.environ["DB_PASS"]
 DB_USER = os.environ["DB_USER"]
-DB_PORT = os.environ.get("DB_PORT") or "5432"
+DB_PORT = os.environ.get("DB_PORT", "5432")

 file_name = sys.argv[1]
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -48,7 +48,7 @@ def restore_backup():
     if not backup_exists():
         sys.stderr.write("Backup file doesn't exists!\n")
         sys.exit(1)

     # restore postgres-backup
     env = os.environ.copy()
     if DB_USE_ENV:
@@ -71,10 +71,10 @@ def main():
     else:
         log("Downloading database dump")
         download_backup()

     log("Restoring database")
     restore_backup()

     log("Restore complete, took %.2f seconds" % (datetime.now() - start_time).total_seconds())

 if __name__ == "__main__":
24 changes: 15 additions & 9 deletions 12/backup.py
@@ -9,11 +9,13 @@

 BACKUP_DIR = os.environ["BACKUP_DIR"]

-S3_PATH = os.environ["S3_PATH"]
-S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS") or "STANDARD_IA"
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
+AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
+S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")

-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")

 if not DB_NAME:
@@ -23,17 +25,17 @@
 DB_HOST = os.environ["DB_HOST"]
 DB_PASS = os.environ["DB_PASS"]
 DB_USER = os.environ["DB_USER"]
-DB_PORT = os.environ.get("DB_PORT") or "5432"
+DB_PORT = os.environ.get("DB_PORT", "5432")

 MAIL_TO = os.environ.get("MAIL_TO")
 MAIL_FROM = os.environ.get("MAIL_FROM")
 WEBHOOK = os.environ.get("WEBHOOK")
 WEBHOOK_METHOD = os.environ.get("WEBHOOK_METHOD")
 WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA")
-WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS") or ""
+WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "")
 KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7))
 FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d")
-PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS") or ""
+PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "")

 file_name = dt.strftime(FILENAME)
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -112,8 +114,12 @@ def main():
     take_backup()
     backup_size=os.path.getsize(backup_file)

-    log("Uploading to S3")
-    upload_backup()
+    if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
+        log("Uploading to S3")
+        upload_backup()
+    else:
+        log("Skipping S3 upload, no AWS credentials provided")
+
     log("Pruning local backup copies")
     prune_local_backup_files()
     end_time = datetime.now()
14 changes: 7 additions & 7 deletions 12/restore.py
@@ -7,10 +7,10 @@

 BACKUP_DIR = os.environ["BACKUP_DIR"]

-S3_PATH = os.environ["S3_PATH"]
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")

-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")

 if not DB_NAME:
@@ -20,7 +20,7 @@
 DB_HOST = os.environ["DB_HOST"]
 DB_PASS = os.environ["DB_PASS"]
 DB_USER = os.environ["DB_USER"]
-DB_PORT = os.environ.get("DB_PORT") or "5432"
+DB_PORT = os.environ.get("DB_PORT", "5432")

 file_name = sys.argv[1]
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -48,7 +48,7 @@ def restore_backup():
     if not backup_exists():
         sys.stderr.write("Backup file doesn't exists!\n")
         sys.exit(1)

     # restore postgres-backup
     env = os.environ.copy()
     if DB_USE_ENV:
@@ -71,10 +71,10 @@ def main():
     else:
         log("Downloading database dump")
         download_backup()

     log("Restoring database")
     restore_backup()

     log("Restore complete, took %.2f seconds" % (datetime.now() - start_time).total_seconds())

 if __name__ == "__main__":
24 changes: 15 additions & 9 deletions 13/backup.py
@@ -9,11 +9,13 @@

 BACKUP_DIR = os.environ["BACKUP_DIR"]

-S3_PATH = os.environ["S3_PATH"]
-S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS") or "STANDARD_IA"
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
+AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
+S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")

-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")

 if not DB_NAME:
@@ -23,17 +25,17 @@
 DB_HOST = os.environ["DB_HOST"]
 DB_PASS = os.environ["DB_PASS"]
 DB_USER = os.environ["DB_USER"]
-DB_PORT = os.environ.get("DB_PORT") or "5432"
+DB_PORT = os.environ.get("DB_PORT", "5432")

 MAIL_TO = os.environ.get("MAIL_TO")
 MAIL_FROM = os.environ.get("MAIL_FROM")
 WEBHOOK = os.environ.get("WEBHOOK")
 WEBHOOK_METHOD = os.environ.get("WEBHOOK_METHOD")
 WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA")
-WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS") or ""
+WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "")
 KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7))
 FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d")
-PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS") or ""
+PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "")

 file_name = dt.strftime(FILENAME)
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -112,8 +114,12 @@ def main():
     take_backup()
     backup_size=os.path.getsize(backup_file)

-    log("Uploading to S3")
-    upload_backup()
+    if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
+        log("Uploading to S3")
+        upload_backup()
+    else:
+        log("Skipping S3 upload, no AWS credentials provided")
+
     log("Pruning local backup copies")
     prune_local_backup_files()
     end_time = datetime.now()
14 changes: 7 additions & 7 deletions 13/restore.py
@@ -7,10 +7,10 @@

 BACKUP_DIR = os.environ["BACKUP_DIR"]

-S3_PATH = os.environ["S3_PATH"]
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")

-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")

 if not DB_NAME:
@@ -20,7 +20,7 @@
 DB_HOST = os.environ["DB_HOST"]
 DB_PASS = os.environ["DB_PASS"]
 DB_USER = os.environ["DB_USER"]
-DB_PORT = os.environ.get("DB_PORT") or "5432"
+DB_PORT = os.environ.get("DB_PORT", "5432")

 file_name = sys.argv[1]
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -48,7 +48,7 @@ def restore_backup():
     if not backup_exists():
         sys.stderr.write("Backup file doesn't exists!\n")
         sys.exit(1)

     # restore postgres-backup
     env = os.environ.copy()
     if DB_USE_ENV:
@@ -71,10 +71,10 @@ def main():
     else:
         log("Downloading database dump")
         download_backup()

     log("Restoring database")
     restore_backup()

     log("Restore complete, took %.2f seconds" % (datetime.now() - start_time).total_seconds())

 if __name__ == "__main__":