Skip to content

Commit

Permalink
Adds generational backup
Browse files Browse the repository at this point in the history
  • Loading branch information
sabeechen committed Apr 2, 2019
1 parent a79e36d commit 6de3ffa
Show file tree
Hide file tree
Showing 9 changed files with 451 additions and 20 deletions.
8 changes: 8 additions & 0 deletions .vscode/launch.json
Expand Up @@ -13,6 +13,14 @@
"args" : ["${workspaceFolder}/dev/data/options.json"],
"cwd": "${workspaceFolder}/hassio-google-drive-backup"
},
{
"name": "Python: Mock Hass.io",
"type": "python",
"request": "launch",
"program": "${workspaceFolder}/dev/mock_hassio.py",
"args" : [],
"cwd": "${workspaceFolder}/dev"
},
{
"name": "Python: Current File (Integrated Terminal)",
"type": "python",
Expand Down
14 changes: 10 additions & 4 deletions dev/data/options.json
@@ -1,7 +1,7 @@
{
"max_snapshots_in_hassio": 0,
"max_snapshots_in_google_drive": 0,
"days_between_snapshots": 0,
"max_snapshots_in_hassio": 5,
"max_snapshots_in_google_drive": 5,
"days_between_snapshots": 1,
"verbose" : true,
"use_ssl": false,
"hassio_base_url": "http://127.0.0.1:5000/",
Expand All @@ -12,5 +12,11 @@
"keyfile": "../dev/ssl/privkey.pem",
"folder_file_path": "../dev/data/folder.dat",
"credentials_file_path": "../dev/data/credentials.dat",
"snapshot_time_of_day": "21:24"
"snapshot_time_of_day": "21:24",
"generational_backup": {
"days": 3,
"weeks": 2,
"months": 2,
"years": 2
}
}
12 changes: 10 additions & 2 deletions dev/mock_hassio.py
Expand Up @@ -21,6 +21,7 @@
from threading import Lock
from flask_api import status # type: ignore
from shutil import copyfile
from dateutil.parser import parse

app = Flask(__name__)

Expand All @@ -42,9 +43,16 @@ def getsnapshots() -> str:

@app.route('/snapshots/new/full', methods=['POST'])
def newSnapshot() -> Any:
pprint(request.args)
seconds = NEW_SNAPSHOT_SLEEP_SECONDS
if 'seconds' in request.args.keys():
if 'seconds' in request.args.keys(): # type: ignore
seconds = int(request.args['seconds'])

date: Optional[datetime] = None
if 'date' in request.args.keys(): # type: ignore
date = parse(request.args['date'], tzinfos=tzutc)
else:
date = datetime.now(tzutc())
if not snapshot_lock.acquire(blocking=False):
return "", status.HTTP_400_BAD_REQUEST
try:
Expand All @@ -54,7 +62,7 @@ def newSnapshot() -> Any:
sleep(seconds)
snapshots.append({
'name' : name,
'date' : str(datetime.now(tzutc()).isoformat()),
'date' : str(date.isoformat()),
'size' : os.path.getsize(TAR_FILE) / 1024.0 / 1024.0,
'slug' : slug
})
Expand Down
111 changes: 105 additions & 6 deletions hassio-google-drive-backup/backup/backupscheme.py
@@ -1,10 +1,109 @@
import datetime
from typing import Dict, List, Tuple, Sequence, Optional
from abc import ABC, abstractmethod
from .snapshots import Snapshot
from .time import Time
from datetime import datetime
from datetime import timedelta
from calendar import monthrange

from typing import Dict, List, Tuple

class BackupScheme(ABC):
    """Strategy interface for choosing which snapshot to delete next.

    Concrete schemes (OldestScheme, GenerationalScheme) implement
    getOldest() to pick the next deletion candidate when the number of
    stored snapshots exceeds the configured maximum.
    """

    @abstractmethod
    def getOldest(self, snapshots: Sequence[Snapshot]) -> Optional[Snapshot]:
        """Return the snapshot that should be deleted next, or None if
        there is nothing to delete."""
        pass


class OldestScheme(BackupScheme):
    """Simplest scheme: always delete the oldest snapshot first."""

    def getOldest(self, snapshots: Sequence[Snapshot]) -> Optional[Snapshot]:
        """Return the snapshot with the earliest date, or None when empty."""
        oldest: Optional[Snapshot] = None
        for candidate in snapshots:
            if oldest is None or candidate.date() < oldest.date():
                oldest = candidate
        return oldest


class Partition(object):
    """A half-open time window [start, end) with a preferred instant.

    GenerationalScheme builds one Partition per retained day, week,
    month and year, then keeps the single snapshot in each window that
    lies closest to the preferred time.
    """

    def __init__(self, start: datetime, end: datetime, prefer: datetime):
        self.start: datetime = start
        self.end: datetime = end
        self.prefer: datetime = prefer

    def select(self, snapshots: List[Snapshot]) -> Optional[Snapshot]:
        """Return the snapshot inside [start, end) nearest to prefer,
        or None when the window contains no snapshot."""
        candidates = [s for s in snapshots if self.start <= s.date() < self.end]

        def distance(snapshot: Snapshot) -> float:
            # Absolute offset from the preferred instant, in seconds.
            return abs((snapshot.date() - self.prefer).total_seconds())

        return min(candidates, default=None, key=distance)


class GenerationalScheme(BackupScheme):
    """Deletion scheme that keeps a generational (grandfather-father-son)
    set of snapshots: the N most recent days, weeks, months and years.

    Snapshots that fall into no configured window are deleted first
    (oldest first); once only "keepers" remain, the oldest keeper goes.
    """

    # Maps the config's day-of-week names to datetime.weekday() numbers.
    WEEKDAY_LOOKUP: Dict[str, int] = {
        'mon': 0,
        'tue': 1,
        'wed': 2,
        'thu': 3,
        'fri': 4,
        'sat': 5,
        'sun': 6,
    }

    def __init__(self, time: Time, partitions: Dict[str, int]):
        """
        time: helper used to convert snapshot dates to local time.
        partitions: generational config with 'days', 'weeks', 'months',
            'years', 'day_of_week', 'day_of_month' and 'day_of_year'.
            NOTE(review): 'day_of_week' holds a string despite the int
            annotation inherited from the original signature.
        """
        self.time: Time = time
        # FIX: previously annotated Dict[str, Any], but 'Any' was never
        # imported in this module.  Attribute annotations are evaluated at
        # runtime, so constructing the scheme raised NameError.
        self.partitions: Dict[str, int] = partitions

    def getOldest(self, to_segment: Sequence[Snapshot]) -> Optional[Snapshot]:
        """Return the next snapshot to delete under the generational rules,
        or None when no snapshots were given."""
        snapshots: List[Snapshot] = sorted(to_segment, key=lambda s: s.date())
        if not snapshots:
            return None

        # Preferred weekly snapshot day; falls back to 3 (Thursday), the
        # same hard-coded default the original used for unknown values.
        day_of_week = self.WEEKDAY_LOOKUP.get(
            self.partitions.get('day_of_week', ''), 3)

        # All windows are anchored to the newest snapshot, in local time.
        # Missing counts default to 0 windows instead of raising KeyError.
        last = self.time.toLocal(snapshots[-1].date())
        midnight = datetime(last.year, last.month, last.day, tzinfo=last.tzinfo)
        windows: List[Partition] = []

        # One window per retained day, preferring noon.
        for x in range(self.partitions.get('days', 0)):
            start = midnight - timedelta(days=x)
            windows.append(Partition(start, start + timedelta(days=1), start + timedelta(hours=12)))

        # One window per retained week (Monday-aligned), preferring noon on
        # the configured weekday.
        for x in range(self.partitions.get('weeks', 0)):
            start = midnight - timedelta(days=last.weekday()) - timedelta(weeks=x)
            windows.append(Partition(start, start + timedelta(days=7), start + timedelta(days=day_of_week, hours=12)))

        # One window per retained month, preferring the configured day of
        # the month.
        day_of_month = self.partitions.get('day_of_month', 1)
        for x in range(self.partitions.get('months', 0)):
            year_offset, month_offset = divmod(x, 12)
            if last.month - month_offset < 1:
                # Wrapped past January: borrow from the previous year.
                year_offset += 1
                month_offset -= 12
            start = datetime(last.year - year_offset, last.month - month_offset, 1, tzinfo=last.tzinfo)
            days_in_month = monthrange(start.year, start.month)[1]
            windows.append(Partition(start, start + timedelta(days=days_in_month), start + timedelta(days=day_of_month - 1)))

        # One window per retained year, preferring the configured day of
        # the year.
        day_of_year = self.partitions.get('day_of_year', 1)
        for x in range(self.partitions.get('years', 0)):
            start = datetime(last.year - x, 1, 1, tzinfo=last.tzinfo)
            end = datetime(last.year - x + 1, 1, 1, tzinfo=last.tzinfo)
            windows.append(Partition(start, end, start + timedelta(days=day_of_year - 1)))

        # Keep at most one snapshot per window.
        keepers = set()
        for window in windows:
            keeper = window.select(snapshots)
            if keeper is not None:
                keepers.add(keeper)

        # Delete non-keepers first, oldest first (snapshots is sorted).
        for snapshot in snapshots:
            if snapshot not in keepers:
                return snapshot

        # Every snapshot is a keeper, so fall back to deleting the oldest.
        return min(keepers, default=None, key=lambda s: s.date())


21 changes: 21 additions & 0 deletions hassio-google-drive-backup/backup/config.py
Expand Up @@ -114,3 +114,24 @@ def snapshotTimeOfDay(self) -> Optional[str]:
if len(str(self.config['snapshot_time_of_day'])) > 0:
return str(self.config['snapshot_time_of_day'])
return None

def getGenerationalConfig(self) -> Optional[Dict[str, Any]]:
    """Return the 'generational_backup' config with defaults filled in,
    or None when generational backup isn't configured.

    Missing keys are written back into the stored config dict, so the
    returned mapping is the same object held in self.config.
    """
    if 'generational_backup' not in self.config:
        return None
    base = self.config['generational_backup']
    defaults = {
        'days': 0,
        'weeks': 0,
        'months': 0,
        'years': 0,
        'day_of_week': 'mon',
        'day_of_month': 1,
        'day_of_year': 1,
    }
    for key, value in defaults.items():
        base.setdefault(key, value)
    return base
19 changes: 14 additions & 5 deletions hassio-google-drive-backup/backup/engine.py
Expand Up @@ -18,6 +18,7 @@
from datetime import datetime
from typing import Dict, List, Optional, Callable, Any, Sequence
from oauth2client.client import Credentials #type: ignore
from .backupscheme import GenerationalScheme, OldestScheme


BAD_TOKEN_ERROR_MESSAGE: str = "Google rejected the credentials we gave it. Please use the \"Reauthorize\" button on the right to give the Add-on permission to use Google Drive again. This can happen if you change your account password, you revoke the add-on's access, your Google Account has been inactive for 6 months, or your system's clock is off."
Expand Down Expand Up @@ -56,6 +57,12 @@ def __init__(self, config: Config, drive: Drive, hassio: Hassio, time: Time):
self.next_error_backoff: int = ERROR_BACKOFF_MIN_SECS
self.one_shot: bool = False
self.snapshots_stale: bool = False

gen_config = self.config.getGenerationalConfig()
if gen_config:
self.scheme = GenerationalScheme(self.time, gen_config)
else:
self.scheme = OldestScheme()

def saveCreds(self, creds: Credentials) -> None:
self.drive.saveCreds(creds)
Expand Down Expand Up @@ -251,20 +258,22 @@ def _syncSnapshots(self) -> None:

def _purgeDriveBackups(self) -> None:
while self.drive.enabled() and self.config.maxSnapshotsInGoogleDrive() > 0 and self.driveSnapshotCount() > self.config.maxSnapshotsInGoogleDrive():
oldest: Snapshot = min(filter(DRIVE_LAMBDA, self.snapshots), key=DATE_LAMBDA)
oldest: Snapshot = self.scheme.getOldest(filter(DRIVE_LAMBDA, self.snapshots))
self.drive.deleteSnapshot(oldest)
if oldest.isDeleted():
self.snapshots.remove(oldest)

def _checkForBackup(self) -> None:
# Get the local and remote snapshots available
self._syncSnapshots()
def _purgeHaSnapshots(self) -> None:
while self.config.maxSnapshotsInHassio() > 0 and self.haSnapshotCount() > self.config.maxSnapshotsInHassio():
oldest_hassio: Snapshot = min(filter(HA_LAMBDA, self.snapshots), key=DATE_LAMBDA)
oldest_hassio: Snapshot = self.scheme.getOldest(filter(HA_LAMBDA, self.snapshots))
self.hassio.deleteSnapshot(oldest_hassio)
if not oldest_hassio.isInDrive():
self.snapshots.remove(oldest_hassio)

def _checkForBackup(self) -> None:
# Get the local and remote snapshots available
self._syncSnapshots()
self._purgeHaSnapshots()
self._purgeDriveBackups()

oldest: Optional[Snapshot] = None
Expand Down
4 changes: 2 additions & 2 deletions hassio-google-drive-backup/backup/snapshots.py
Expand Up @@ -79,7 +79,7 @@ def date(self) -> datetime:
return parseDateTime(self.source['date'])

def __str__(self) -> str:
return "<HA: {0} Name: {1}>".format(self.slug(), self.name())
return "<HA: {0} Name: {1} {2}>".format(self.slug(), self.name(), self.date().isoformat())

def __format__(self, format_spec: str) -> str:
return self.__str__()
Expand Down Expand Up @@ -231,7 +231,7 @@ def uploading(self, percent: int) -> None:
self.uploading_pct = percent

def __str__(self) -> str:
return "<Slug: {0} Ha: {1} Drive: {2} Pending: {3}>".format(self.slug(), self.ha, self.driveitem, self.pending)
return "<Slug: {0} Ha: {1} Drive: {2} Pending: {3} {4}>".format(self.slug(), self.ha, self.driveitem, self.pending, self.date().isoformat())

def __format__(self, format_spec: str) -> str:
return self.__str__()
Expand Down

0 comments on commit 6de3ffa

Please sign in to comment.