diff --git a/10/backup.py b/10/backup.py
index c4bf67c..f34014c 100644
--- a/10/backup.py
+++ b/10/backup.py
@@ -9,11 +9,13 @@
 BACKUP_DIR = os.environ["BACKUP_DIR"]
-S3_PATH = os.environ["S3_PATH"]
-S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS") or "STANDARD_IA"
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
+AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
+S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")
 
-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")
 if not DB_NAME:
@@ -23,17 +25,17 @@
     DB_HOST = os.environ["DB_HOST"]
     DB_PASS = os.environ["DB_PASS"]
     DB_USER = os.environ["DB_USER"]
-    DB_PORT = os.environ.get("DB_PORT") or "5432"
+    DB_PORT = os.environ.get("DB_PORT", "5432")
 
 MAIL_TO = os.environ.get("MAIL_TO")
 MAIL_FROM = os.environ.get("MAIL_FROM")
 WEBHOOK = os.environ.get("WEBHOOK")
 WEBHOOK_METHOD = os.environ.get("WEBHOOK_METHOD")
 WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA")
-WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS") or ""
+WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "")
 KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7))
 FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d")
-PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS") or ""
+PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "")
 
 file_name = dt.strftime(FILENAME)
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -112,8 +114,12 @@ def main():
     take_backup()
     backup_size=os.path.getsize(backup_file)
 
-    log("Uploading to S3")
-    upload_backup()
+    if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
+        log("Uploading to S3")
+        upload_backup()
+    else:
+        log("Skipping S3 upload, no AWS credentials provided")
+
     log("Pruning local backup copies")
     prune_local_backup_files()
     end_time = datetime.now()
diff --git a/10/restore.py b/10/restore.py
index 4ce1d26..4abc06d 100644
--- a/10/restore.py
+++ b/10/restore.py
@@ -7,10 +7,10 @@
 BACKUP_DIR = os.environ["BACKUP_DIR"]
-S3_PATH = os.environ["S3_PATH"]
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")
 
-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")
 if not DB_NAME:
@@ -20,7 +20,7 @@
     DB_HOST = os.environ["DB_HOST"]
     DB_PASS = os.environ["DB_PASS"]
     DB_USER = os.environ["DB_USER"]
-    DB_PORT = os.environ.get("DB_PORT") or "5432"
+    DB_PORT = os.environ.get("DB_PORT", "5432")
 
 file_name = sys.argv[1]
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -48,7 +48,7 @@ def restore_backup():
     if not backup_exists():
         sys.stderr.write("Backup file doesn't exists!\n")
         sys.exit(1)
-    
+
     # restore postgres-backup
     env = os.environ.copy()
     if DB_USE_ENV:
@@ -71,10 +71,10 @@ def main():
     else:
         log("Downloading database dump")
         download_backup()
-    
+
     log("Restoring database")
     restore_backup()
-    
+
     log("Restore complete, took %.2f seconds" % (datetime.now() - start_time).total_seconds())
 
 if __name__ == "__main__":
diff --git a/11/backup.py b/11/backup.py
index c4bf67c..f34014c 100644
--- a/11/backup.py
+++ b/11/backup.py
@@ -9,11 +9,13 @@
 BACKUP_DIR = os.environ["BACKUP_DIR"]
-S3_PATH = os.environ["S3_PATH"]
-S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS") or "STANDARD_IA"
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
+AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
+S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")
 
-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")
 if not DB_NAME:
@@ -23,17 +25,17 @@
     DB_HOST = os.environ["DB_HOST"]
     DB_PASS = os.environ["DB_PASS"]
     DB_USER = os.environ["DB_USER"]
-    DB_PORT = os.environ.get("DB_PORT") or "5432"
+    DB_PORT = os.environ.get("DB_PORT", "5432")
 
 MAIL_TO = os.environ.get("MAIL_TO")
 MAIL_FROM = os.environ.get("MAIL_FROM")
 WEBHOOK = os.environ.get("WEBHOOK")
 WEBHOOK_METHOD = os.environ.get("WEBHOOK_METHOD")
 WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA")
-WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS") or ""
+WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "")
 KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7))
 FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d")
-PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS") or ""
+PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "")
 
 file_name = dt.strftime(FILENAME)
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -112,8 +114,12 @@ def main():
     take_backup()
     backup_size=os.path.getsize(backup_file)
 
-    log("Uploading to S3")
-    upload_backup()
+    if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
+        log("Uploading to S3")
+        upload_backup()
+    else:
+        log("Skipping S3 upload, no AWS credentials provided")
+
     log("Pruning local backup copies")
     prune_local_backup_files()
     end_time = datetime.now()
diff --git a/11/restore.py b/11/restore.py
index 4ce1d26..4abc06d 100644
--- a/11/restore.py
+++ b/11/restore.py
@@ -7,10 +7,10 @@
 BACKUP_DIR = os.environ["BACKUP_DIR"]
-S3_PATH = os.environ["S3_PATH"]
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")
 
-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")
 if not DB_NAME:
@@ -20,7 +20,7 @@
     DB_HOST = os.environ["DB_HOST"]
     DB_PASS = os.environ["DB_PASS"]
     DB_USER = os.environ["DB_USER"]
-    DB_PORT = os.environ.get("DB_PORT") or "5432"
+    DB_PORT = os.environ.get("DB_PORT", "5432")
 
 file_name = sys.argv[1]
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -48,7 +48,7 @@ def restore_backup():
     if not backup_exists():
         sys.stderr.write("Backup file doesn't exists!\n")
         sys.exit(1)
-    
+
     # restore postgres-backup
     env = os.environ.copy()
     if DB_USE_ENV:
@@ -71,10 +71,10 @@ def main():
     else:
         log("Downloading database dump")
         download_backup()
-    
+
     log("Restoring database")
     restore_backup()
-    
+
     log("Restore complete, took %.2f seconds" % (datetime.now() - start_time).total_seconds())
 
 if __name__ == "__main__":
diff --git a/12/backup.py b/12/backup.py
index c4bf67c..f34014c 100644
--- a/12/backup.py
+++ b/12/backup.py
@@ -9,11 +9,13 @@
 BACKUP_DIR = os.environ["BACKUP_DIR"]
-S3_PATH = os.environ["S3_PATH"]
-S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS") or "STANDARD_IA"
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
+AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
+S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")
 
-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")
 if not DB_NAME:
@@ -23,17 +25,17 @@
     DB_HOST = os.environ["DB_HOST"]
     DB_PASS = os.environ["DB_PASS"]
     DB_USER = os.environ["DB_USER"]
-    DB_PORT = os.environ.get("DB_PORT") or "5432"
+    DB_PORT = os.environ.get("DB_PORT", "5432")
 
 MAIL_TO = os.environ.get("MAIL_TO")
 MAIL_FROM = os.environ.get("MAIL_FROM")
 WEBHOOK = os.environ.get("WEBHOOK")
 WEBHOOK_METHOD = os.environ.get("WEBHOOK_METHOD")
 WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA")
-WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS") or ""
+WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "")
 KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7))
 FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d")
-PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS") or ""
+PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "")
 
 file_name = dt.strftime(FILENAME)
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -112,8 +114,12 @@ def main():
     take_backup()
     backup_size=os.path.getsize(backup_file)
 
-    log("Uploading to S3")
-    upload_backup()
+    if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
+        log("Uploading to S3")
+        upload_backup()
+    else:
+        log("Skipping S3 upload, no AWS credentials provided")
+
     log("Pruning local backup copies")
     prune_local_backup_files()
     end_time = datetime.now()
diff --git a/12/restore.py b/12/restore.py
index 4ce1d26..4abc06d 100644
--- a/12/restore.py
+++ b/12/restore.py
@@ -7,10 +7,10 @@
 BACKUP_DIR = os.environ["BACKUP_DIR"]
-S3_PATH = os.environ["S3_PATH"]
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")
 
-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")
 if not DB_NAME:
@@ -20,7 +20,7 @@
     DB_HOST = os.environ["DB_HOST"]
     DB_PASS = os.environ["DB_PASS"]
     DB_USER = os.environ["DB_USER"]
-    DB_PORT = os.environ.get("DB_PORT") or "5432"
+    DB_PORT = os.environ.get("DB_PORT", "5432")
 
 file_name = sys.argv[1]
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -48,7 +48,7 @@ def restore_backup():
     if not backup_exists():
         sys.stderr.write("Backup file doesn't exists!\n")
         sys.exit(1)
-    
+
     # restore postgres-backup
     env = os.environ.copy()
     if DB_USE_ENV:
@@ -71,10 +71,10 @@ def main():
     else:
         log("Downloading database dump")
         download_backup()
-    
+
     log("Restoring database")
     restore_backup()
-    
+
     log("Restore complete, took %.2f seconds" % (datetime.now() - start_time).total_seconds())
 
 if __name__ == "__main__":
diff --git a/13/backup.py b/13/backup.py
index c4bf67c..f34014c 100644
--- a/13/backup.py
+++ b/13/backup.py
@@ -9,11 +9,13 @@
 BACKUP_DIR = os.environ["BACKUP_DIR"]
-S3_PATH = os.environ["S3_PATH"]
-S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS") or "STANDARD_IA"
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
+AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
+S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")
 
-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")
 if not DB_NAME:
@@ -23,17 +25,17 @@
     DB_HOST = os.environ["DB_HOST"]
     DB_PASS = os.environ["DB_PASS"]
     DB_USER = os.environ["DB_USER"]
-    DB_PORT = os.environ.get("DB_PORT") or "5432"
+    DB_PORT = os.environ.get("DB_PORT", "5432")
 
 MAIL_TO = os.environ.get("MAIL_TO")
 MAIL_FROM = os.environ.get("MAIL_FROM")
 WEBHOOK = os.environ.get("WEBHOOK")
 WEBHOOK_METHOD = os.environ.get("WEBHOOK_METHOD")
 WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA")
-WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS") or ""
+WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "")
 KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7))
 FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d")
-PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS") or ""
+PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "")
 
 file_name = dt.strftime(FILENAME)
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -112,8 +114,12 @@ def main():
     take_backup()
     backup_size=os.path.getsize(backup_file)
 
-    log("Uploading to S3")
-    upload_backup()
+    if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
+        log("Uploading to S3")
+        upload_backup()
+    else:
+        log("Skipping S3 upload, no AWS credentials provided")
+
     log("Pruning local backup copies")
     prune_local_backup_files()
     end_time = datetime.now()
diff --git a/13/restore.py b/13/restore.py
index 4ce1d26..4abc06d 100644
--- a/13/restore.py
+++ b/13/restore.py
@@ -7,10 +7,10 @@
 BACKUP_DIR = os.environ["BACKUP_DIR"]
-S3_PATH = os.environ["S3_PATH"]
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")
 
-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")
 if not DB_NAME:
@@ -20,7 +20,7 @@
     DB_HOST = os.environ["DB_HOST"]
     DB_PASS = os.environ["DB_PASS"]
     DB_USER = os.environ["DB_USER"]
-    DB_PORT = os.environ.get("DB_PORT") or "5432"
+    DB_PORT = os.environ.get("DB_PORT", "5432")
 
 file_name = sys.argv[1]
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -48,7 +48,7 @@ def restore_backup():
     if not backup_exists():
         sys.stderr.write("Backup file doesn't exists!\n")
         sys.exit(1)
-    
+
     # restore postgres-backup
     env = os.environ.copy()
     if DB_USE_ENV:
@@ -71,10 +71,10 @@ def main():
     else:
         log("Downloading database dump")
         download_backup()
-    
+
     log("Restoring database")
     restore_backup()
-    
+
     log("Restore complete, took %.2f seconds" % (datetime.now() - start_time).total_seconds())
 
 if __name__ == "__main__":
diff --git a/14/backup.py b/14/backup.py
index c4bf67c..f34014c 100644
--- a/14/backup.py
+++ b/14/backup.py
@@ -9,11 +9,13 @@
 BACKUP_DIR = os.environ["BACKUP_DIR"]
-S3_PATH = os.environ["S3_PATH"]
-S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS") or "STANDARD_IA"
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
+AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
+S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")
 
-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")
 if not DB_NAME:
@@ -23,17 +25,17 @@
     DB_HOST = os.environ["DB_HOST"]
     DB_PASS = os.environ["DB_PASS"]
     DB_USER = os.environ["DB_USER"]
-    DB_PORT = os.environ.get("DB_PORT") or "5432"
+    DB_PORT = os.environ.get("DB_PORT", "5432")
 
 MAIL_TO = os.environ.get("MAIL_TO")
 MAIL_FROM = os.environ.get("MAIL_FROM")
 WEBHOOK = os.environ.get("WEBHOOK")
 WEBHOOK_METHOD = os.environ.get("WEBHOOK_METHOD")
 WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA")
-WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS") or ""
+WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "")
 KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7))
 FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d")
-PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS") or ""
+PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "")
 
 file_name = dt.strftime(FILENAME)
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -112,8 +114,12 @@ def main():
     take_backup()
     backup_size=os.path.getsize(backup_file)
 
-    log("Uploading to S3")
-    upload_backup()
+    if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
+        log("Uploading to S3")
+        upload_backup()
+    else:
+        log("Skipping S3 upload, no AWS credentials provided")
+
     log("Pruning local backup copies")
     prune_local_backup_files()
     end_time = datetime.now()
diff --git a/14/restore.py b/14/restore.py
index 4ce1d26..4abc06d 100644
--- a/14/restore.py
+++ b/14/restore.py
@@ -7,10 +7,10 @@
 BACKUP_DIR = os.environ["BACKUP_DIR"]
-S3_PATH = os.environ["S3_PATH"]
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")
 
-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")
 if not DB_NAME:
@@ -20,7 +20,7 @@
     DB_HOST = os.environ["DB_HOST"]
     DB_PASS = os.environ["DB_PASS"]
     DB_USER = os.environ["DB_USER"]
-    DB_PORT = os.environ.get("DB_PORT") or "5432"
+    DB_PORT = os.environ.get("DB_PORT", "5432")
 
 file_name = sys.argv[1]
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -48,7 +48,7 @@ def restore_backup():
     if not backup_exists():
         sys.stderr.write("Backup file doesn't exists!\n")
         sys.exit(1)
-    
+
     # restore postgres-backup
     env = os.environ.copy()
     if DB_USE_ENV:
@@ -71,10 +71,10 @@ def main():
     else:
         log("Downloading database dump")
         download_backup()
-    
+
     log("Restoring database")
     restore_backup()
-    
+
     log("Restore complete, took %.2f seconds" % (datetime.now() - start_time).total_seconds())
 
 if __name__ == "__main__":
diff --git a/15/backup.py b/15/backup.py
index c4bf67c..f34014c 100644
--- a/15/backup.py
+++ b/15/backup.py
@@ -9,11 +9,13 @@
 BACKUP_DIR = os.environ["BACKUP_DIR"]
-S3_PATH = os.environ["S3_PATH"]
-S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS") or "STANDARD_IA"
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
+AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
+S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")
 
-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")
 if not DB_NAME:
@@ -23,17 +25,17 @@
     DB_HOST = os.environ["DB_HOST"]
     DB_PASS = os.environ["DB_PASS"]
     DB_USER = os.environ["DB_USER"]
-    DB_PORT = os.environ.get("DB_PORT") or "5432"
+    DB_PORT = os.environ.get("DB_PORT", "5432")
 
 MAIL_TO = os.environ.get("MAIL_TO")
 MAIL_FROM = os.environ.get("MAIL_FROM")
 WEBHOOK = os.environ.get("WEBHOOK")
 WEBHOOK_METHOD = os.environ.get("WEBHOOK_METHOD")
 WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA")
-WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS") or ""
+WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "")
 KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7))
 FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d")
-PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS") or ""
+PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "")
 
 file_name = dt.strftime(FILENAME)
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -112,8 +114,12 @@ def main():
     take_backup()
     backup_size=os.path.getsize(backup_file)
 
-    log("Uploading to S3")
-    upload_backup()
+    if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
+        log("Uploading to S3")
+        upload_backup()
+    else:
+        log("Skipping S3 upload, no AWS credentials provided")
+
     log("Pruning local backup copies")
     prune_local_backup_files()
     end_time = datetime.now()
diff --git a/15/restore.py b/15/restore.py
index 4ce1d26..4abc06d 100644
--- a/15/restore.py
+++ b/15/restore.py
@@ -7,10 +7,10 @@
 BACKUP_DIR = os.environ["BACKUP_DIR"]
-S3_PATH = os.environ["S3_PATH"]
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")
 
-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")
 if not DB_NAME:
@@ -20,7 +20,7 @@
     DB_HOST = os.environ["DB_HOST"]
     DB_PASS = os.environ["DB_PASS"]
     DB_USER = os.environ["DB_USER"]
-    DB_PORT = os.environ.get("DB_PORT") or "5432"
+    DB_PORT = os.environ.get("DB_PORT", "5432")
 
 file_name = sys.argv[1]
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -48,7 +48,7 @@ def restore_backup():
     if not backup_exists():
         sys.stderr.write("Backup file doesn't exists!\n")
         sys.exit(1)
-    
+
     # restore postgres-backup
     env = os.environ.copy()
     if DB_USE_ENV:
@@ -71,10 +71,10 @@ def main():
     else:
         log("Downloading database dump")
         download_backup()
-    
+
     log("Restoring database")
     restore_backup()
-    
+
     log("Restore complete, took %.2f seconds" % (datetime.now() - start_time).total_seconds())
 
 if __name__ == "__main__":
diff --git a/16/backup.py b/16/backup.py
index c4bf67c..f34014c 100644
--- a/16/backup.py
+++ b/16/backup.py
@@ -9,11 +9,13 @@
 BACKUP_DIR = os.environ["BACKUP_DIR"]
-S3_PATH = os.environ["S3_PATH"]
-S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS") or "STANDARD_IA"
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
+AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
+S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")
 
-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")
 if not DB_NAME:
@@ -23,17 +25,17 @@
     DB_HOST = os.environ["DB_HOST"]
     DB_PASS = os.environ["DB_PASS"]
     DB_USER = os.environ["DB_USER"]
-    DB_PORT = os.environ.get("DB_PORT") or "5432"
+    DB_PORT = os.environ.get("DB_PORT", "5432")
 
 MAIL_TO = os.environ.get("MAIL_TO")
 MAIL_FROM = os.environ.get("MAIL_FROM")
 WEBHOOK = os.environ.get("WEBHOOK")
 WEBHOOK_METHOD = os.environ.get("WEBHOOK_METHOD")
 WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA")
-WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS") or ""
+WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "")
 KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7))
 FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d")
-PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS") or ""
+PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "")
 
 file_name = dt.strftime(FILENAME)
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -112,8 +114,12 @@ def main():
     take_backup()
     backup_size=os.path.getsize(backup_file)
 
-    log("Uploading to S3")
-    upload_backup()
+    if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
+        log("Uploading to S3")
+        upload_backup()
+    else:
+        log("Skipping S3 upload, no AWS credentials provided")
+
     log("Pruning local backup copies")
     prune_local_backup_files()
     end_time = datetime.now()
diff --git a/16/restore.py b/16/restore.py
index 4ce1d26..4abc06d 100644
--- a/16/restore.py
+++ b/16/restore.py
@@ -7,10 +7,10 @@
 BACKUP_DIR = os.environ["BACKUP_DIR"]
-S3_PATH = os.environ["S3_PATH"]
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")
 
-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")
 if not DB_NAME:
@@ -20,7 +20,7 @@
     DB_HOST = os.environ["DB_HOST"]
     DB_PASS = os.environ["DB_PASS"]
     DB_USER = os.environ["DB_USER"]
-    DB_PORT = os.environ.get("DB_PORT") or "5432"
+    DB_PORT = os.environ.get("DB_PORT", "5432")
 
 file_name = sys.argv[1]
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -48,7 +48,7 @@ def restore_backup():
     if not backup_exists():
         sys.stderr.write("Backup file doesn't exists!\n")
         sys.exit(1)
-    
+
     # restore postgres-backup
     env = os.environ.copy()
     if DB_USE_ENV:
@@ -71,10 +71,10 @@ def main():
     else:
         log("Downloading database dump")
         download_backup()
-    
+
     log("Restoring database")
     restore_backup()
-    
+
     log("Restore complete, took %.2f seconds" % (datetime.now() - start_time).total_seconds())
 
 if __name__ == "__main__":
diff --git a/README.md b/README.md
index 059aaa6..e935f06 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
 
 [![Build Status](https://github.com/heyman/postgresql-backup/workflows/Test/badge.svg)](https://github.com/heyman/postgresql-backup/actions?query=workflow%3ATest)
 
-Docker image that periodically dumps a Postgres database, and uploads it to an Amazon S3 bucket.
+Docker image that periodically dumps a Postgres database, and optionally uploads it to an Amazon S3 bucket.
 
 Available on Docker Hub: [heyman/postgresql-backup](https://hub.docker.com/r/heyman/postgresql-backup)
 
@@ -28,13 +28,13 @@ docker run -it --rm --name=pgbackup \
 * `DB_PASS`: Postgres password
 * `DB_USER`: Postgres username
 * `DB_NAME`: Name of database
+
+## Optional environment variables
+
 * `S3_PATH`: Amazon S3 path in the format: s3://bucket-name/some/path
 * `AWS_ACCESS_KEY_ID`
 * `AWS_SECRET_ACCESS_KEY`
 * `AWS_DEFAULT_REGION`
-
-## Optional environment variables
-
 * `S3_STORAGE_CLASS`: Specify [storage class](https://docs.aws.amazon.com/AmazonS3/latest/userguide/storage-class-intro.html) for the uploaded object, defaults to `STANDARD_IA`.
 * `S3_EXTRA_OPTIONS`: Specify additional options for S3, e.g. `--endpoint=` for using custom S3 provider.
 * `DB_USE_ENV`: Inject [postgres environment variables](https://www.postgresql.org/docs/13/libpq-envars.html) from the environment. Ignores `DB_HOST`, `DB_PASS`, `DB_USER` and `DB_NAME`. Can be used to specify advanced connections, e.g. using mTLS connection.
@@ -71,7 +71,7 @@ Text in `WEBHOOK_DATA` is interpolated with variabels `%(my_var)s`
 
 #### Example on how to post a Slack message when a backup is complete
 
-1. Configure a webhook as described in the Slack [documentation](https://api.slack.com/messaging/webhooks). 
+1. Configure a webhook as described in the Slack [documentation](https://api.slack.com/messaging/webhooks).
 2. Set `WEBHOOK` and `WEBHOOK_` accodringly:
 ```
 WEBHOOK=https://hooks.slack.com/services/.../.../...
@@ -82,13 +82,13 @@ Text in `WEBHOOK_DATA` is interpolated with variabels `%(my_var)s`
 
 ## Volumes
 
-* `/data/backups` - The database is dumped in into this directory
+* `/data/backups` - The database is dumped into this directory
 
 
 ## Restoring a backup
 
-This image can also be run as a one off task to restore one of the backups. 
-To do this, we run the container with the command: `python -u /backup/restore.py [S3-filename]` 
-(`S3-filename` should only be the name of the file, the directory is set through the `S3_PATH` env variable).
+This image can also be run as a one off task to restore one of the backups.
+To do this, we run the container with the command: `python -u /backup/restore.py [backup-filename]`
+(`backup-filename` should only be the name of the file, the directory is set through the `S3_PATH` env variable, if it needs to be downloaded).
 
 The following environment variables are required:
@@ -96,6 +96,9 @@ The following environment variables are required:
 * `DB_PASS`: Postgres password
 * `DB_USER`: Postgres username
 * `DB_NAME`: Name of database to import into
+
+The following environment variables are required if the file to restore is not already in the backup volume:
+
 * `S3_PATH`: Amazon S3 directory path in the format: s3://bucket-name/some/path
 * `AWS_ACCESS_KEY_ID`
 * `AWS_SECRET_ACCESS_KEY`
 * `AWS_DEFAULT_REGION`
@@ -108,17 +111,16 @@ The following environment variables are required:
 
 ## Taking a one off backup
 
-To run a one off backup job, e.g. to test that it works when setting it up for the first time, simply start 
-the container with the docker run command set to `python -u /backup/backup.py` (as well as all the required environment 
+To run a one off backup job, e.g. to test that it works when setting it up for the first time, simply start
+the container with the docker run command set to `python -u /backup/backup.py` (as well as all the required environment
 variables set).
 
-
 ## Docker tags
 
-This image uses the alpine version(s) of the [official postgres](https://hub.docker.com/_/postgres) image as base 
+This image uses the alpine version(s) of the [official postgres](https://hub.docker.com/_/postgres) image as base
 image.
 
-The following docker tags are available for this image, and they are based on the corresponding official postgres 
+The following docker tags are available for this image, and they are based on the corresponding official postgres
 alpine image:
 
 * `16`, `latest`
@@ -128,4 +130,3 @@ alpine image:
 * `12`
 * `11`
 * `10`
-
diff --git a/template/backup.py b/template/backup.py
index c4bf67c..f34014c 100644
--- a/template/backup.py
+++ b/template/backup.py
@@ -9,11 +9,13 @@
 BACKUP_DIR = os.environ["BACKUP_DIR"]
-S3_PATH = os.environ["S3_PATH"]
-S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS") or "STANDARD_IA"
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
+AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
+S3_STORAGE_CLASS = os.environ.get("S3_STORAGE_CLASS", "STANDARD_IA")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")
 
-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")
 if not DB_NAME:
@@ -23,17 +25,17 @@
     DB_HOST = os.environ["DB_HOST"]
     DB_PASS = os.environ["DB_PASS"]
     DB_USER = os.environ["DB_USER"]
-    DB_PORT = os.environ.get("DB_PORT") or "5432"
+    DB_PORT = os.environ.get("DB_PORT", "5432")
 
 MAIL_TO = os.environ.get("MAIL_TO")
 MAIL_FROM = os.environ.get("MAIL_FROM")
 WEBHOOK = os.environ.get("WEBHOOK")
 WEBHOOK_METHOD = os.environ.get("WEBHOOK_METHOD")
 WEBHOOK_DATA = os.environ.get("WEBHOOK_DATA")
-WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS") or ""
+WEBHOOK_CURL_OPTIONS = os.environ.get("WEBHOOK_CURL_OPTIONS", "")
 KEEP_BACKUP_DAYS = int(os.environ.get("KEEP_BACKUP_DAYS", 7))
 FILENAME = os.environ.get("FILENAME", DB_NAME + "_%Y-%m-%d")
-PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS") or ""
+PG_DUMP_EXTRA_OPTIONS = os.environ.get("PG_DUMP_EXTRA_OPTIONS", "")
 
 file_name = dt.strftime(FILENAME)
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -112,8 +114,12 @@ def main():
     take_backup()
     backup_size=os.path.getsize(backup_file)
 
-    log("Uploading to S3")
-    upload_backup()
+    if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
+        log("Uploading to S3")
+        upload_backup()
+    else:
+        log("Skipping S3 upload, no AWS credentials provided")
+
     log("Pruning local backup copies")
     prune_local_backup_files()
     end_time = datetime.now()
diff --git a/template/restore.py b/template/restore.py
index 4ce1d26..4abc06d 100644
--- a/template/restore.py
+++ b/template/restore.py
@@ -7,10 +7,10 @@
 BACKUP_DIR = os.environ["BACKUP_DIR"]
-S3_PATH = os.environ["S3_PATH"]
-S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS") or ""
+S3_PATH = os.environ.get("S3_PATH", "")
+S3_EXTRA_OPTIONS = os.environ.get("S3_EXTRA_OPTIONS", "")
 
-DB_USE_ENV = os.environ.get("DB_USE_ENV") or False
+DB_USE_ENV = os.environ.get("DB_USE_ENV", False)
 
 DB_NAME = os.environ["DB_NAME"] if "DB_NAME" in os.environ else os.environ.get("PGDATABASE")
 if not DB_NAME:
@@ -20,7 +20,7 @@
     DB_HOST = os.environ["DB_HOST"]
     DB_PASS = os.environ["DB_PASS"]
     DB_USER = os.environ["DB_USER"]
-    DB_PORT = os.environ.get("DB_PORT") or "5432"
+    DB_PORT = os.environ.get("DB_PORT", "5432")
 
 file_name = sys.argv[1]
 backup_file = os.path.join(BACKUP_DIR, file_name)
@@ -48,7 +48,7 @@ def restore_backup():
     if not backup_exists():
         sys.stderr.write("Backup file doesn't exists!\n")
         sys.exit(1)
-    
+
     # restore postgres-backup
     env = os.environ.copy()
     if DB_USE_ENV:
@@ -71,10 +71,10 @@ def main():
     else:
         log("Downloading database dump")
         download_backup()
-    
+
     log("Restoring database")
     restore_backup()
-    
+
     log("Restore complete, took %.2f seconds" % (datetime.now() - start_time).total_seconds())
 
 if __name__ == "__main__":
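Taken on its own, the upload gate this patch adds to `backup.py` boils down to the standalone sketch below. It is illustrative only: the `upload_backup()` body here is a placeholder stand-in, not the image's real upload step, and the only assumptions made are the environment variable names already used in the patch.

```python
import os

# S3 settings are now optional: read with .get() so a missing variable no
# longer raises KeyError when the image is used for local-only backups.
S3_PATH = os.environ.get("S3_PATH", "")
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")

def upload_backup():
    # Placeholder for the real upload in backup.py (an aws-cli subprocess call).
    print("uploading to %s" % S3_PATH)

# The upload only runs when both AWS credentials are present; otherwise the
# dump simply stays in the local BACKUP_DIR volume and is pruned as before.
if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
    upload_backup()
else:
    print("Skipping S3 upload, no AWS credentials provided")
```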