Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/main' into zip-files
Browse files Browse the repository at this point in the history
  • Loading branch information
Akarys42 committed Jun 10, 2021
2 parents b4e0f92 + 5a8e999 commit 787587e
Show file tree
Hide file tree
Showing 17 changed files with 111 additions and 134 deletions.
126 changes: 62 additions & 64 deletions blackbox/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,10 @@
The backups are stored on your favorite cloud storage providers, and Blackbox will notify
you on your chat platform of choice once the job is done.
"""
import datetime
import logging
import os
from pathlib import Path
from tempfile import TemporaryDirectory
from textwrap import dedent

import click
Expand Down Expand Up @@ -40,71 +41,68 @@ def run() -> bool:

all_workflows = workflows.get_workflows(database_handlers, storage_handlers, notifier_handlers)

backup_files = []

for workflow in all_workflows:
database = workflow.database

# Do a backup, then return the path to the backup file
backup_file = database.backup()
backup_files.append(backup_file)
database_id = database.get_id_for_retention()
database.teardown()

# Add report to notifiers
report = DatabaseReport(database.config["id"], database.success, database.output)
for notifier in workflow.notifiers:
notifier.add_database(report)

# If backup failed, continue to next database. No need to sync.
if not database.success:
continue

for storage in workflow.storage_providers:
# Sync the provider, then rotate and cleanup
storage.sync(backup_file)
storage.rotate(database_id)
storage.teardown()

# Store the outcome to the database report
report.report_storage(storage.config["id"], storage.success, storage.output)

# Set overall program success to False if workflow is unsuccessful
if report.success is False:
success = False

cooldown = CONFIG['cooldown']
logging.debug(f"Cooldown setting is {cooldown}")
if cooldown:
is_on_cooldown_ = is_on_cooldown(cooldown)

# Send a report for each notifier configured
for notifier in notifier_handlers["all"]:
# Don't send a notification if no database uses the notifier
if notifier.report.is_empty:
continue

# If cooldown is not set or if report is failed: just notify.

if cooldown is None or not notifier.report.success:
log.debug('Config not found or backup failed, sending notification.')
notifier.notify()

# But otherwise let's check do we have a right to notify
else:
if not is_on_cooldown_:
with TemporaryDirectory() as backup_dir:
log.info(f"Backing up to folder: {backup_dir}")
backup_dir = Path(backup_dir)
date = datetime.date.today().strftime("%d_%m_%Y")
backup_files = []

for workflow in all_workflows:
database = workflow.database

# Do a backup, then return the path to the backup file
backup_filename = f"{database.config['id']}_blackbox_{date}{database.backup_extension}"
backup_path = backup_dir / backup_filename
database.backup(backup_path)
backup_files.append(backup_path)
database_id = database.get_id_for_retention()
database.teardown()

# Add report to notifiers
report = DatabaseReport(database.config["id"], database.success, database.output)
for notifier in workflow.notifiers:
notifier.add_database(report)

# If backup failed, continue to next database. No need to sync.
if not database.success:
continue

for storage in workflow.storage_providers:
# Sync the provider, then rotate and cleanup
storage.sync(backup_path)
storage.rotate(database_id)
storage.teardown()

# Store the outcome to the database report
report.report_storage(storage.config["id"], storage.success, storage.output)

# Set overall program success to False if workflow is unsuccessful
if report.success is False:
success = False

cooldown = CONFIG['cooldown']
logging.debug(f"Cooldown setting is {cooldown}")
if cooldown:
is_on_cooldown_ = is_on_cooldown(cooldown)

# Send a report for each notifier configured
for notifier in notifier_handlers["all"]:
# Don't send a notification if no database uses the notifier
if notifier.report.is_empty:
continue

        # If cooldown is not set, or if the report indicates failure: just notify.
if cooldown is None or not notifier.report.success:
log.debug('Config not found or backup failed, sending notification.')
notifier.notify()

notifier.teardown()
        # But otherwise, check whether we are allowed to notify.
else:
if not is_on_cooldown_:
notifier.notify()

# Clean up databases backups.
for file in backup_files:
try:
os.remove(file)
log.info(f"{file} deleted.")
except OSError:
log.info(f"{file} is not deleted.")
return success
notifier.teardown()
return success


@click.command()
Expand All @@ -113,7 +111,7 @@ def run() -> bool:
@click.option('--version', is_flag=True, help="Show version and exit")
def cli(config, init, version):
"""
BLACKBOX
BLACKBOX
Backup database to external storage system
""" # noqa
Expand Down
16 changes: 11 additions & 5 deletions blackbox/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,10 @@ class YAMLGetter(type):
Supports getting configuration from up to two levels
of nested configuration through `section` and `subsection`.
- `section` specifies the YAML configuration section (or "key") in which the configuration lives.
- `subsection` specifies the section within the section from which configuration should be loaded.
- `section` specifies the YAML configuration section
(or "key") in which the configuration lives.
- `subsection` specifies the section within the
section from which configuration should be loaded.
If neither of these is set, it'll just try to get it from the root level.
Expand Down Expand Up @@ -79,8 +81,9 @@ def __getattr__(cls, name):
# allows us to comfortably modify the `_config` at some point before we start using
# any of the Handlers, without worrying about any race conditions.
#
# If you need this config to be set before or after the first call to __getattr__, simply call
# YAMLGetter.parse_config(). You can pass in a configuration file path if you need to.
# If you need this config to be set before or after the first call to __getattr__,
# simply call YAMLGetter.parse_config().
# You can pass in a configuration file path if you need to.
if not cls._config:
cls.parse_config()

Expand All @@ -93,7 +96,10 @@ def __getattr__(cls, name):
return cls._config[cls.section][cls.subsection][name]
except KeyError:
# If one of the handler lists isn't defined, return an empty list.
log.warning(f"{name} is not defined in the blackbox.yaml file -- returning an falsy value.")
log.warning(
f"{name} is not defined in the blackbox.yaml file -- "
"returning a falsy value."
)
if cls._get_annotation(name) == list:
return []
elif cls._get_annotation(name) == dict:
Expand Down
5 changes: 3 additions & 2 deletions blackbox/handlers/databases/_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ class BlackboxDatabase(BlackboxHandler):
"""An abstract database handler."""

handler_type = "database"
backup_extension = ""

def __init__(self, **kwargs):
"""Set up database handler."""
Expand All @@ -17,9 +18,9 @@ def __init__(self, **kwargs):
self.output = "" # What did the backup output?

@abstractmethod
def backup(self) -> Path:
def backup(self, backup_path: Path):
"""
Back up a database and return the Path for the backup file.
Back up a database to the provided backup Path.
All subclasses must implement this method.
"""
Expand Down
3 changes: 2 additions & 1 deletion blackbox/handlers/databases/localstorage.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,8 @@ def __init__(self, **kwargs) -> None:
if compression_level := kwargs.get("compression_level"):
if compression_level < 0 or compression_level > 9:
raise ImproperlyConfigured(
f"Invalid compression level. Must be an integer between 0 and 9, got {compression_level}."
f"Invalid compression level. "
f"Must be an integer between 0 and 9, got {compression_level}."
)

super().__init__(**kwargs)
Expand Down
13 changes: 2 additions & 11 deletions blackbox/handlers/databases/mariadb.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,3 @@
import datetime
from pathlib import Path

from blackbox.handlers.databases._base import BlackboxDatabase
from blackbox.utils import run_command
from blackbox.utils.logger import log
Expand All @@ -10,18 +7,15 @@ class MariaDB(BlackboxDatabase):
"""A Database handler that will do a mysqldump for MariaDB, backing up all tables."""

required_fields = ("username", "password", "host", )
backup_extension = ".sql"

def backup(self) -> Path:
def backup(self, backup_path) -> None:
"""Dump all the data to a file and then return the filepath."""
date = datetime.date.today().strftime("%d_%m_%Y")

user = self.config["username"]
password = self.config["password"]
host = self.config["host"]
port = str(self.config.get("port", "3306"))

backup_path = Path.home() / f"{self.config['id']}_blackbox_{date}.sql"

# Run the backup, and store the outcome.
self.success, self.output = run_command(
f"mysqldump -h {host} -u {user} --password='{password}' "
Expand All @@ -33,6 +27,3 @@ def backup(self) -> Path:
if "error" in self.output.lower():
self.success = False
log.debug("mysqldump has error(s) in log")

# Return the path to the backup file
return backup_path
14 changes: 3 additions & 11 deletions blackbox/handlers/databases/mongodb.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,3 @@
import datetime
from pathlib import Path

from blackbox.handlers.databases._base import BlackboxDatabase
from blackbox.utils import run_command
from blackbox.utils.logger import log
Expand All @@ -15,21 +12,16 @@ class MongoDB(BlackboxDatabase):
"""

required_fields = ("connection_string",)
backup_extension = ".archive"

def backup(self) -> Path:
def backup(self, backup_path) -> None:
"""Dump all the data to a file and then return the filepath."""
date = datetime.date.today().strftime("%d_%m_%Y")
archive_file = Path.home() / f"{self.config['id']}_blackbox_{date}.archive"

# Run the backup, and store the outcome in this object.
self.success, self.output = run_command(
f"mongodump "
f"--uri={self.config['connection_string']} "
"--gzip "
"--forceTableScan "
f"--archive={archive_file}"
f"--archive={backup_path}"
)
log.debug(self.output)

# Return the path to the backup file
return archive_file
12 changes: 2 additions & 10 deletions blackbox/handlers/databases/postgres.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,3 @@
import datetime
from pathlib import Path

from blackbox.handlers.databases._base import BlackboxDatabase
from blackbox.utils import run_command
from blackbox.utils.logger import log
Expand All @@ -10,12 +7,10 @@ class Postgres(BlackboxDatabase):
"""A Database handler that will do a pg_dumpall for Postgres, backing up all tables."""

required_fields = ("username", "password", "host", )
backup_extension = ".sql"

def backup(self) -> Path:
def backup(self, backup_path) -> None:
"""Dump all the data to a file and then return the filepath."""
date = datetime.date.today().strftime("%d_%m_%Y")
backup_path = Path.home() / f"{self.config['id']}_blackbox_{date}.sql"

# Run the backup, and store the outcome.
self.success, self.output = run_command(
f"pg_dumpall --file={backup_path}",
Expand All @@ -25,6 +20,3 @@ def backup(self) -> Path:
PGPORT=str(self.config.get("port", "5432")),
)
log.debug(self.output)

# Return the path to the backup file
return backup_path
12 changes: 2 additions & 10 deletions blackbox/handlers/databases/redis.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,3 @@
import datetime
from pathlib import Path

from blackbox.handlers.databases._base import BlackboxDatabase
from blackbox.utils import run_command
from blackbox.utils.logger import log
Expand All @@ -10,12 +7,10 @@ class Redis(BlackboxDatabase):
"""A Database handler that will run a redis-cli command for Redis backup."""

required_fields = ("password", "host", )
backup_extension = ".rdb"

def backup(self) -> Path:
def backup(self, backup_path) -> None:
"""Dump all the data to a file and then return the filepath."""
date = datetime.date.today().strftime("%d_%m_%Y")
backup_path = Path.home() / f"{self.config['id']}_blackbox_{date}.rdb"

# Run the backup, and store the outcome.
self.success, self.output = run_command(
"redis-cli "
Expand All @@ -25,6 +20,3 @@ def backup(self) -> Path:
REDISCLI_AUTH=self.config.get("password")
)
log.debug(self.output)

# Return the path to the backup file
return backup_path
2 changes: 1 addition & 1 deletion blackbox/handlers/notifiers/discord.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def _parse_report(self) -> dict:
}
],
"username": "blackbox",
"avatar_url": "https://raw.githubusercontent.com/lemonsaurus/blackbox/main/img/blackbox_avatar.png"
"avatar_url": "https://raw.githubusercontent.com/lemonsaurus/blackbox/main/img/blackbox_avatar.png" # NOQA: E501
}

def notify(self):
Expand Down
4 changes: 2 additions & 2 deletions blackbox/handlers/notifiers/slack.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ def _parse_report_classic(self) -> dict:
"mrkdwn_in": ["fields"],
"title": "Backup",
"author_name": "blackbox",
"author_icon": "https://raw.githubusercontent.com/lemonsaurus/blackbox/main/img/blackbox_avatar.png"
"author_icon": "https://raw.githubusercontent.com/lemonsaurus/blackbox/main/img/blackbox_avatar.png" # NOQA: E501
}

# Combine and truncate total output to < 2000 characters, fields don't support more.
Expand Down Expand Up @@ -139,7 +139,7 @@ def _parse_report_modern(self) -> dict:
"elements": [
{
"type": "image",
"image_url": "https://raw.githubusercontent.com/lemonsaurus/blackbox/main/img/blackbox_avatar.png",
"image_url": "https://raw.githubusercontent.com/lemonsaurus/blackbox/main/img/blackbox_avatar.png", # NOQA: E501
"alt_text": "blackbox"
},
{
Expand Down
5 changes: 4 additions & 1 deletion blackbox/utils/workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,10 @@ def get_configured_handlers(config: dict) -> dict:
return handler_dict


def get_handlers_by_id(id_: t.Union[str, list[str]], handlers: HandlerById[Handler]) -> set[Handler]:
def get_handlers_by_id(
id_: t.Union[str, list[str]],
handlers: HandlerById[Handler]
) -> set[Handler]:
"""
Given ids and a mapping of id to handlers, return handlers matching the ids.
Expand Down
2 changes: 1 addition & 1 deletion tests/test_discord.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def test_discord_notify(mock_valid_discord_config, report):
discord.report = report

assert discord._parse_report() == {
'avatar_url': 'https://raw.githubusercontent.com/lemonsaurus/blackbox/main/img/blackbox_avatar.png',
'avatar_url': 'https://raw.githubusercontent.com/lemonsaurus/blackbox/main/img/blackbox_avatar.png', # NOQA: E501
'content': None,
'embeds': [{'color': 1024049,
'fields': [{'inline': True,
Expand Down
Loading

0 comments on commit 787587e

Please sign in to comment.