Commit
Add db_backup python script and fix pg_restore.sh
aloftus23 committed Nov 9, 2022
1 parent 0c89337 commit 4885a4d
Showing 3 changed files with 74 additions and 2 deletions.
70 changes: 70 additions & 0 deletions src/pe_reports/pe_db/db_backup.py
@@ -0,0 +1,70 @@
"""Run the database backup script and save to S3 bucket."""
# Standard Python Libraries
import datetime
import logging
import os

# Third-Party Libraries
import boto3
from botocore.exceptions import ClientError
from importlib_resources import files

BACKUP_SCRIPT = files("pe_reports").joinpath("pe_db/pg_backup.sh")
BUCKET_NAME = "cisa-crossfeed-pe-db-backups"
DATE = datetime.datetime.now().strftime("%m-%d-%Y")  # Match the backups_$(date +%m-%d-%Y) folder name created by pg_backup.sh
LOGGER = logging.getLogger(__name__)


def run_backup():
    """Run the database backup script."""
    failed = False
    try:
        LOGGER.info("Running database backup...")
        LOGGER.info(BACKUP_SCRIPT)
        cmd = f"bash {BACKUP_SCRIPT}"
        # os.system() signals failure through its exit status rather than
        # by raising, so check the status explicitly.
        status = os.system(cmd)
        if status != 0:
            failed = True
            LOGGER.error("Backup script exited with status %s.", status)
        else:
            LOGGER.info("Success")
    except Exception as e:
        failed = True
        LOGGER.error(e)
        LOGGER.error("Failed running backup script.")
    return failed


def upload_file_to_s3(file_name, datestring, bucket):
    """Upload a file to an S3 bucket."""
    LOGGER.info("Running S3 upload script.")
    LOGGER.info(file_name)
    s3_client = boto3.client("s3")

    # Store the object under a <datestring>/<basename> key
    object_name = f"{datestring}/{os.path.basename(file_name)}"
    try:
        # upload_file() returns None on success and raises ClientError on failure
        response = s3_client.upload_file(file_name, bucket, object_name)
        if response is None:
            LOGGER.info("Success uploading to S3.")
        else:
            LOGGER.error(response)
    except ClientError as e:
        LOGGER.error(e)


def main():
    """Run the database backup script and save to S3 bucket."""
    # Run DB backup script
    failed = run_backup()

    if failed:
        LOGGER.error("Not uploading to S3.")
        return

    # Upload each DB backup file to the specified S3 bucket
    base = f"/var/www/db_backups/backups_{DATE}"
    backup_files = ["pedb_dump.sql", "pedb_globals.sql", "stderr.txt"]
    for file in backup_files:
        file_name = f"{base}/{file}"
        upload_file_to_s3(file_name, DATE, BUCKET_NAME)


if __name__ == "__main__":
    main()
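
A minimal way to exercise this script by hand might look like the sketch below. The module path is an assumption based on the file's location under src/pe_reports/pe_db/, and boto3 is expected to find AWS credentials in the environment or an instance profile:

# Sketch (assumed invocation, not part of the commit): run the
# backup-and-upload job by hand.
python3 -m pe_reports.pe_db.db_backup

# Illustrative crontab entry to run it nightly at 02:00:
# 0 2 * * * python3 -m pe_reports.pe_db.db_backup >> /var/log/pe_db_backup.log 2>&1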
2 changes: 1 addition & 1 deletion src/pe_reports/pe_db/pg_backup.sh
@@ -4,7 +4,7 @@ set -o nounset
set -o pipefail

# Set path to save backup files
-path=$PWD
+path=/var/www/db_backups
backup_folder=$path/backups_$(date +%m-%d-%Y)

globalsqlfile=$backup_folder/pedb_globals.sql
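
The hard-coded path matters because db_backup.py above rebuilds the same folder name before uploading. A quick sketch of the shared convention, with the folder format taken from the backup_folder line above:

# Sketch: the date-stamped folder both scripts must agree on.
backup_folder=/var/www/db_backups/backups_$(date +%m-%d-%Y)
ls -l "$backup_folder"/pedb_globals.sql "$backup_folder"/pedb_dump.sql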
4 changes: 3 additions & 1 deletion src/pe_reports/pe_db/pg_restore.sh
@@ -6,7 +6,9 @@ set -o pipefail
# Set path to pg dump and globals backup files. ex: /Users/user/Desktop/backups
path=$PWD

-dropdb pe ---host localhost --username postgres --if-exists
+dropdb pe --host localhost --username postgres --if-exists
+
+dropdb rdsadmin --host localhost --username postgres --if-exists

globalsql=$path/pedb_globals.sql
dumpsql=$path/pedb_dump.sql
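
The restore commands themselves are collapsed above; a typical sequence after the dropdb calls might look like the following sketch (hypothetical, the script's actual commands may differ):

# Sketch (assumed, not shown in this diff): replay globals, recreate
# the database, then load the dump.
psql --host localhost --username postgres -f "$globalsql"
createdb pe --host localhost --username postgres
psql --host localhost --username postgres --dbname pe -f "$dumpsql"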
