
Commit

cloud backups to aws
usmannasir committed Dec 8, 2020
1 parent e1f1f52 commit 43d514c
Showing 6 changed files with 184 additions and 22 deletions.
7 changes: 5 additions & 2 deletions cloudAPI/cloudManager.py
@@ -961,9 +961,12 @@ def fetchBackupLogs(self, request):

def forceRunAWSBackup(self, request):
try:

request.session['userID'] = self.admin.pk
s3 = S3Backups(request, self.data, 'forceRunAWSBackup')
s3.start()

execPath = "/usr/local/CyberCP/bin/python /usr/local/CyberCP/IncBackups/IncScheduler.py Daily"
ProcessUtilities.popenExecutioner(execPath)

return self.ajaxPre(1, None)
except BaseException as msg:
return self.ajaxPre(0, str(msg))
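For context, ProcessUtilities.popenExecutioner is assumed here to behave like a fire-and-forget process launch, so the API call can return immediately while IncScheduler does the backup work out of process. A minimal sketch under that assumption (not CyberPanel's actual implementation):

    import shlex
    import subprocess

    def popen_executioner_sketch(exec_path):
        # Launch detached; stdout/stderr discarded so the web request
        # returns without waiting on the backup run.
        subprocess.Popen(shlex.split(exec_path),
                         stdout=subprocess.DEVNULL,
                         stderr=subprocess.DEVNULL)

    popen_executioner_sketch(
        '/usr/local/CyberCP/bin/python /usr/local/CyberCP/IncBackups/IncScheduler.py Daily')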
87 changes: 86 additions & 1 deletion plogical/IncScheduler.py
@@ -18,11 +18,15 @@
from googleapiclient.http import MediaFileUpload
from plogical.backupSchedule import backupSchedule
import requests
from websiteFunctions.models import NormalBackupJobs, NormalBackupSites, NormalBackupDests, NormalBackupJobLogs
from websiteFunctions.models import NormalBackupJobs, NormalBackupJobLogs

try:
from s3Backups.models import BackupPlan, BackupLogs
import boto3
from plogical.virtualHostUtilities import virtualHostUtilities
from plogical.mailUtilities import mailUtilities
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
from plogical.processUtilities import ProcessUtilities
except:
pass

@@ -589,6 +593,86 @@ def startNormalBackups(type):
backupjob.config = json.dumps(jobConfig)
backupjob.save()

@staticmethod
def fetchAWSKeys():
path = '/home/cyberpanel/.aws'
credentials = path + '/credentials'

data = open(credentials, 'r').readlines()

aws_access_key_id = data[1].split(' ')[2].strip(' ').strip('\n')
aws_secret_access_key = data[2].split(' ')[2].strip(' ').strip('\n')
region = data[3].split(' ')[2].strip(' ').strip('\n')

return aws_access_key_id, aws_secret_access_key, region
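The fixed offsets above (data[1] through data[3], third whitespace-separated token) assume a credentials file of exactly this shape. A sketch with illustrative values, plus a less position-dependent configparser equivalent:

    # Assumed layout of /home/cyberpanel/.aws/credentials (illustrative values):
    #
    #   [default]
    #   aws_access_key_id = AKIAEXAMPLEKEY
    #   aws_secret_access_key = examplesecretkey
    #   region = us-east-1

    import configparser

    def fetch_aws_keys_sketch(path='/home/cyberpanel/.aws/credentials'):
        parser = configparser.ConfigParser()
        parser.read(path)
        section = parser['default']
        return (section['aws_access_key_id'],
                section['aws_secret_access_key'],
                section['region'])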

@staticmethod
def forceRunAWSBackup():
try:

plan = BackupPlan.objects.get(name='hi') ## NOTE: plan name is hardcoded here
bucketName = plan.bucket.strip('\n').strip(' ')
runTime = time.strftime("%d:%m:%Y")

aws_access_key_id, aws_secret_access_key, region = IncScheduler.fetchAWSKeys()

client = boto3.client(
's3',
aws_access_key_id = aws_access_key_id,
aws_secret_access_key = aws_secret_access_key,
#region_name=region
)

##


BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
msg='Starting backup process..').save()

PlanConfig = json.loads(plan.config)

for items in plan.websitesinplan_set.all():

from plogical.backupUtilities import backupUtilities
tempStatusPath = "/home/cyberpanel/" + str(randint(1000, 9999))
extraArgs = {}
extraArgs['domain'] = items.domain
extraArgs['tempStatusPath'] = tempStatusPath
extraArgs['data'] = PlanConfig['data']
extraArgs['emails'] = PlanConfig['emails']
extraArgs['databases'] = PlanConfig['databases']
bu = backupUtilities(extraArgs)
result = bu.CloudBackups()

finalResult = open(tempStatusPath, 'r').read()

if result[0] == 1:
key = plan.name + '/' + runTime + '/' + result[1]
client.upload_file(
result[1],
bucketName,
key,
)

command = 'rm -f ' + result[1]
ProcessUtilities.executioner(command)

BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
msg='Backup successful for ' + items.domain + '.').save()
else:
BackupLogs(owner=plan, level='ERROR', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
msg='Backup failed for ' + items.domain + '. Error: ' + finalResult).save()

plan.lastRun = runTime
plan.save()

BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
msg='Backup Process Finished.').save()
except BaseException as msg:
logging.writeToFile(str(msg) + ' [IncScheduler.forceRunAWSBackup]')
plan = BackupPlan.objects.get(name='hi')
BackupLogs(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='ERROR', msg=str(msg)).save()
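Note that the key built above concatenates result[1] verbatim; since CloudBackups returns the archive's full path, the object key embeds the path's directory components as pseudo-folders. A sketch with hypothetical values:

    import time

    plan_name = 'MyPlan'                                            # plan.name
    run_time = time.strftime('%d:%m:%Y')                            # e.g. '08:12:2020'
    archive = '/home/example.com/backup/backup-example.com.tar.gz'  # result[1]

    key = plan_name + '/' + run_time + '/' + archive
    # -> 'MyPlan/08:12:2020//home/example.com/backup/backup-example.com.tar.gz'
    # Using os.path.basename(archive) instead would keep the keys flat.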



def main():
@@ -602,6 +686,7 @@ def main():
IncScheduler.git(args.function)
IncScheduler.checkDiskUsage()
IncScheduler.startNormalBackups(args.function)
IncScheduler.forceRunAWSBackup()


if __name__ == "__main__":
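For reference, this script is driven from the command line (the same execPath used in cloudManager.py); note that forceRunAWSBackup() takes no frequency argument, so it runs on every invocation:

    # Assumed invocation, matching the execPath in cloudManager.py:
    #   /usr/local/CyberCP/bin/python /usr/local/CyberCP/IncBackups/IncScheduler.py Daily
    #
    # 'Daily' arrives as args.function; git() and startNormalBackups() consult it,
    # while checkDiskUsage() and forceRunAWSBackup() take no argument.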
21 changes: 16 additions & 5 deletions plogical/backupUtilities.py
@@ -1438,7 +1438,7 @@ def CloudBackups(self):
if result[0] == 0:
logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
'Failed to generate backups for data. Error: %s. [404], 0' % (result[1] ))
return 0
return 0, self.BackupPath

logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
'Data backup successfully generated,30')
@@ -1450,7 +1450,7 @@ def CloudBackups(self):
if result[0] == 0:
logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
'Failed to generate backups for emails. Error: %s. [404], 0' % (result[1] ))
return 0
return 0, self.BackupPath

logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
'Emails backup successfully generated,60')
@@ -1462,7 +1462,7 @@ def CloudBackups(self):
if result[0] == 0:
logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
'Failed to generate backups for databases. Error: %s. [404], 0' % (result[1] ))
return 0
return 0, self.BackupPath

logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
'Databases backups successfully generated,30')
@@ -1476,7 +1476,17 @@ def CloudBackups(self):
command = 'rm -rf %s' % (self.BackupPath)
ProcessUtilities.executioner(command)

logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Completed [200].')
finalPath = '%s.tar.gz' % (self.BackupPath)

command = 'chown cyberpanel:cyberpanel %s' % (finalPath)
ProcessUtilities.executioner(command)

command = 'chmod 600 %s' % (finalPath)
ProcessUtilities.executioner(command)

logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Completed [200].')

return 1, self.BackupPath + '.tar.gz'
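With every exit path now returning a (status, archive path) pair, callers can branch uniformly. A minimal consumer sketch with hypothetical values, mirroring the loop added in IncScheduler.forceRunAWSBackup:

    from plogical.backupUtilities import backupUtilities

    extraArgs = {'domain': 'example.com',
                 'tempStatusPath': '/home/cyberpanel/1234',
                 'data': 1, 'emails': 1, 'databases': 1}

    bu = backupUtilities(extraArgs)
    status, archive = bu.CloudBackups()
    if status == 1:
        print('upload', archive)   # e.g. client.upload_file(archive, bucket, key)
    else:
        print('failed:', open(extraArgs['tempStatusPath']).read())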

## Restore functions

@@ -1605,7 +1615,8 @@ def SubmitCloudBackupRestore(self):
mysqlUtilities.mysqlUtilities.restoreDatabaseBackup(db['databaseName'], self.databasesPath, db['password'])



command = 'rm -rf %s' % (self.extractedPath)
ProcessUtilities.executioner(command)

logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Completed [200].')

4 changes: 2 additions & 2 deletions requirments.txt
@@ -5,8 +5,8 @@ Babel==2.8.0
backports.ssl-match-hostname==3.7.0.1
bcrypt==3.2.0
beautifulsoup4==4.9.3
boto3==1.16.13
botocore==1.19.13
boto3==1.19.30
botocore==1.19.30
cachetools==4.1.1
certifi==2020.11.8
cffi==1.14.3
1 change: 1 addition & 0 deletions s3Backups/models.py
@@ -13,6 +13,7 @@ class BackupPlan(models.Model):
retention = models.IntegerField()
type = models.CharField(max_length=5, default='AWS')
lastRun = models.CharField(max_length=50, default='0:0:0')
config = models.TextField()

class WebsitesInPlan(models.Model):
owner = models.ForeignKey(BackupPlan,on_delete=models.CASCADE)
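The new config column stores the what-to-backup selection as JSON; the shape written by createPlan() and savePlanChanges() and read back in forceRunAWSBackup() is:

    import json

    config = json.dumps({'data': 1, 'databases': 1, 'emails': 0})  # stored on the plan
    PlanConfig = json.loads(config)                                # read at backup time
    assert PlanConfig['databases'] == 1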
86 changes: 74 additions & 12 deletions s3Backups/s3Backups.py
@@ -176,8 +176,9 @@ def fetchAWSKeys(self):

aws_access_key_id = data[1].split(' ')[2].strip(' ').strip('\n')
aws_secret_access_key = data[2].split(' ')[2].strip(' ').strip('\n')
region = data[3].split(' ')[2].strip(' ').strip('\n')

return aws_access_key_id, aws_secret_access_key
return aws_access_key_id, aws_secret_access_key, region

def fetchBuckets(self):
try:
Expand All @@ -191,12 +192,13 @@ def fetchBuckets(self):
return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')


aws_access_key_id, aws_secret_access_key = self.fetchAWSKeys()
aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys()

s3 = boto3.resource(
's3',
aws_access_key_id = aws_access_key_id,
aws_secret_access_key = aws_secret_access_key
aws_secret_access_key = aws_secret_access_key,
region_name=region
)

json_data = "["
@@ -232,8 +234,28 @@ def createPlan(self):

admin = Administrator.objects.get(pk=userID)

## What to backup

WTB = {}
try:
WTB['data'] = int(self.data['data'])
except:
WTB['data'] = 0

try:
WTB['databases'] = int(self.data['databases'])
except:
WTB['databases'] = 0

try:
WTB['emails'] = int(self.data['emails'])
except:
WTB['emails'] = 0

###

newPlan = BackupPlan(owner=admin, name=self.data['planName'].replace(' ', ''), freq=self.data['frequency'],
retention=self.data['retenion'], bucket=self.data['bucketName'])
retention=self.data['retenion'], bucket=self.data['bucketName'], config=json.dumps(WTB))
newPlan.save()

for items in self.data['websitesInPlan']:
@@ -263,12 +285,16 @@ def fetchBackupPlans(self):
checker = 0

for plan in admin.backupplan_set.all():
config = json.loads(plan.config)
dic = {
'name': plan.name,
'bucket': plan.bucket,
'freq': plan.freq,
'retention': plan.retention,
'lastRun': plan.lastRun,
'data': config['data'],
'databases': config['databases'],
'emails': config['emails'],
}

if checker == 0:
@@ -374,9 +400,28 @@ def savePlanChanges(self):

changePlan = BackupPlan.objects.get(name=self.data['planName'])

## What to backup

WTB = {}
try:
WTB['data'] = int(self.data['data'])
except:
WTB['data'] = 0

try:
WTB['databases'] = int(self.data['databases'])
except:
WTB['databases'] = 0

try:
WTB['emails'] = int(self.data['emails'])
except:
WTB['emails'] = 0

changePlan.bucket = self.data['bucketName']
changePlan.freq = self.data['frequency']
changePlan.retention = self.data['retention']
changePlan.config = json.dumps(WTB)

changePlan.save()
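The same three try/except coercions appear in both createPlan() and savePlanChanges(); a small helper could factor them out (a sketch, not part of this commit):

    def parse_what_to_backup(data):
        # Coerce the data/databases/emails flags to ints, defaulting to 0.
        wtb = {}
        for field in ('data', 'databases', 'emails'):
            try:
                wtb[field] = int(data[field])
            except (KeyError, TypeError, ValueError):
                wtb[field] = 0
        return wtb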

@@ -478,15 +523,17 @@ def forceRunAWSBackup(self):
try:

plan = BackupPlan.objects.get(name=self.data['planName'])
logging.writeToFile(plan.config)
bucketName = plan.bucket.strip('\n').strip(' ')
runTime = time.strftime("%d:%m:%Y")

aws_access_key_id, aws_secret_access_key = self.fetchAWSKeys()
aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys()

client = boto3.client(
's3',
aws_access_key_id = aws_access_key_id,
aws_secret_access_key = aws_secret_access_key
aws_secret_access_key = aws_secret_access_key,
#region_name=region
)


@@ -533,25 +580,40 @@ def forceRunAWSBackup(self):
BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
msg='Starting backup process..').save()

PlanConfig = json.loads(plan.config)

for items in plan.websitesinplan_set.all():
result = self.createBackup(items.domain)
if result[0]:
key = plan.name + '/' + runTime + '/' + result[1].split('/')[-1] + ".tar.gz"

from plogical.backupUtilities import backupUtilities
tempStatusPath = "/home/cyberpanel/" + str(randint(1000, 9999))
extraArgs = {}
extraArgs['domain'] = items.domain
extraArgs['tempStatusPath'] = tempStatusPath
extraArgs['data'] = PlanConfig['data']
extraArgs['emails'] = PlanConfig['emails']
extraArgs['databases'] = PlanConfig['databases']
bu = backupUtilities(extraArgs)
result = bu.CloudBackups()

finalResult = open(tempStatusPath, 'r').read()

if result[0] == 1:
key = plan.name + '/' + runTime + '/' + result[1]
client.upload_file(
result[1] + ".tar.gz",
result[1],
bucketName,
key,
Config=config,
)

command = 'rm -f ' + result[1] + ".tar.gz"
command = 'rm -f ' + result[1]
ProcessUtilities.executioner(command)

BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
msg='Backup successful for ' + items.domain + '.').save()
else:
BackupLogs(owner=plan, level='ERROR', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
msg='Backup failed for ' + items.domain + '. Error: ' + result[1]).save()
msg='Backup failed for ' + items.domain + '. Error: ' + finalResult).save()

plan.lastRun = runTime
plan.save()
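The upload here passes Config=config; that variable is defined in lines elided from this diff and is presumably a boto3 TransferConfig governing multipart uploads, along these lines (illustrative thresholds; TransferConfig is boto3's real transfer API):

    from boto3.s3.transfer import TransferConfig

    config = TransferConfig(multipart_threshold=1024 * 25,
                            max_concurrency=10,
                            multipart_chunksize=1024 * 25,
                            use_threads=True)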
