Commit 43d514c

cloud backups to aws
1 parent e1f1f52 commit 43d514c

6 files changed (+184, -22)

cloudAPI/cloudManager.py (+5, -2)

@@ -961,9 +961,12 @@ def fetchBackupLogs(self, request):
 
     def forceRunAWSBackup(self, request):
         try:
+
            request.session['userID'] = self.admin.pk
-            s3 = S3Backups(request, self.data, 'forceRunAWSBackup')
-            s3.start()
+
+            execPath = "/usr/local/CyberCP/bin/python /usr/local/CyberCP/IncBackups/IncScheduler.py Daily"
+            ProcessUtilities.popenExecutioner(execPath)
+
            return self.ajaxPre(1, None)
         except BaseException as msg:
            return self.ajaxPre(0, str(msg))

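The endpoint now hands the backup run off to IncScheduler.py through ProcessUtilities.popenExecutioner instead of driving S3Backups in-process. A minimal sketch of that hand-off, assuming popenExecutioner behaves like a non-blocking subprocess.Popen wrapper (its real implementation is not part of this diff):

import subprocess

# Hypothetical stand-in for ProcessUtilities.popenExecutioner: start the
# scheduler as a detached child and return immediately, so the AJAX request
# is answered without waiting for the backup to finish.
def popen_executioner_sketch(exec_path):
    subprocess.Popen(
        exec_path.split(),  # ['/usr/local/CyberCP/bin/python', '.../IncScheduler.py', 'Daily']
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
        close_fds=True,
    )

if __name__ == '__main__':
    popen_executioner_sketch(
        '/usr/local/CyberCP/bin/python /usr/local/CyberCP/IncBackups/IncScheduler.py Daily'
    )
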
plogical/IncScheduler.py (+86, -1)

@@ -18,11 +18,15 @@
 from googleapiclient.http import MediaFileUpload
 from plogical.backupSchedule import backupSchedule
 import requests
-from websiteFunctions.models import NormalBackupJobs, NormalBackupSites, NormalBackupDests, NormalBackupJobLogs
+from websiteFunctions.models import NormalBackupJobs, NormalBackupJobLogs
+
 try:
+    from s3Backups.models import BackupPlan, BackupLogs
+    import boto3
     from plogical.virtualHostUtilities import virtualHostUtilities
     from plogical.mailUtilities import mailUtilities
     from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
+    from plogical.processUtilities import ProcessUtilities
 except:
     pass
 
@@ -589,6 +593,86 @@ def startNormalBackups(type):
            backupjob.config = json.dumps(jobConfig)
            backupjob.save()
 
+    @staticmethod
+    def fetchAWSKeys():
+        path = '/home/cyberpanel/.aws'
+        credentials = path + '/credentials'
+
+        data = open(credentials, 'r').readlines()
+
+        aws_access_key_id = data[1].split(' ')[2].strip(' ').strip('\n')
+        aws_secret_access_key = data[2].split(' ')[2].strip(' ').strip('\n')
+        region = data[3].split(' ')[2].strip(' ').strip('\n')
+
+        return aws_access_key_id, aws_secret_access_key, region
+
+    @staticmethod
+    def forceRunAWSBackup():
+        try:
+
+            plan = BackupPlan.objects.get(name='hi')
+            bucketName = plan.bucket.strip('\n').strip(' ')
+            runTime = time.strftime("%d:%m:%Y")
+
+            aws_access_key_id, aws_secret_access_key, region = IncScheduler.fetchAWSKeys()
+
+            client = boto3.client(
+                's3',
+                aws_access_key_id = aws_access_key_id,
+                aws_secret_access_key = aws_secret_access_key,
+                #region_name=region
+            )
+
+            ##
+
+            BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
+                       msg='Starting backup process..').save()
+
+            PlanConfig = json.loads(plan.config)
+
+            for items in plan.websitesinplan_set.all():
+
+                from plogical.backupUtilities import backupUtilities
+                tempStatusPath = "/home/cyberpanel/" + str(randint(1000, 9999))
+                extraArgs = {}
+                extraArgs['domain'] = items.domain
+                extraArgs['tempStatusPath'] = tempStatusPath
+                extraArgs['data'] = PlanConfig['data']
+                extraArgs['emails'] = PlanConfig['emails']
+                extraArgs['databases'] = PlanConfig['databases']
+                bu = backupUtilities(extraArgs)
+                result = bu.CloudBackups()
+
+                finalResult = open(tempStatusPath, 'r').read()
+
+                if result[0] == 1:
+                    key = plan.name + '/' + runTime + '/' + result[1]
+                    client.upload_file(
+                        result[1],
+                        bucketName,
+                        key,
+                    )
+
+                    command = 'rm -f ' + result[1]
+                    ProcessUtilities.executioner(command)
+
+                    BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
+                               msg='Backup successful for ' + items.domain + '.').save()
+                else:
+                    BackupLogs(owner=plan, level='ERROR', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
+                               msg='Backup failed for ' + items.domain + '. Error: ' + finalResult).save()
+
+            plan.lastRun = runTime
+            plan.save()
+
+            BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
+                       msg='Backup Process Finished.').save()
+        except BaseException as msg:
+            logging.writeToFile(str(msg) + ' [S3Backups.runBackupPlan]')
+            plan = BackupPlan.objects.get(name='hi')
+            BackupLogs(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='ERROR', msg=str(msg)).save()
+
 
 
 def main():
@@ -602,6 +686,7 @@ def main():
     IncScheduler.git(args.function)
     IncScheduler.checkDiskUsage()
     IncScheduler.startNormalBackups(args.function)
+    IncScheduler.forceRunAWSBackup()
 
 
 if __name__ == "__main__":

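The new IncScheduler.fetchAWSKeys() reads fixed line positions out of /home/cyberpanel/.aws/credentials, so it assumes the file always carries the section header, key id, secret, and region on consecutive lines. A sketch of the same lookup using configparser rather than positional splitting; the path, profile name, and the idea that region is stored in the credentials file follow the commit and are assumptions, not AWS requirements:

import configparser

def fetch_aws_keys(path='/home/cyberpanel/.aws/credentials', profile='default'):
    """Read key id, secret and (optional) region from an INI-style credentials file.

    Assumed layout, matching what fetchAWSKeys() expects:

        [default]
        aws_access_key_id = AKIA...
        aws_secret_access_key = ...
        region = us-east-1
    """
    parser = configparser.ConfigParser()
    parser.read(path)
    section = parser[profile]
    return (
        section['aws_access_key_id'],
        section['aws_secret_access_key'],
        section.get('region'),  # None when the file has no region entry
    )
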
plogical/backupUtilities.py (+16, -5)

@@ -1438,7 +1438,7 @@ def CloudBackups(self):
         if result[0] == 0:
            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                      'Failed to generate backups for data. Error: %s. [404], 0' % (result[1] ))
-            return 0
+            return 0, self.BackupPath
 
         logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                   'Data backup successfully generated,30')
@@ -1450,7 +1450,7 @@ def CloudBackups(self):
         if result[0] == 0:
            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                      'Failed to generate backups for emails. Error: %s. [404], 0' % (result[1] ))
-            return 0
+            return 0, self.BackupPath
 
         logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                   'Emails backup successfully generated,60')
@@ -1462,7 +1462,7 @@ def CloudBackups(self):
         if result[0] == 0:
            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                      'Failed to generate backups for databases. Error: %s. [404], 0' % (result[1] ))
-            return 0
+            return 0, self.BackupPath
 
         logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                   'Databases backups successfully generated,30')
@@ -1476,7 +1476,17 @@ def CloudBackups(self):
         command = 'rm -rf %s' % (self.BackupPath)
         ProcessUtilities.executioner(command)
 
-        logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Completed [200].')
+        finalPath = '%s.tar.gz' % (self.BackupPath)
+
+        command = 'chown cyberpanel:cyberpanel %s' % (finalPath)
+        ProcessUtilities.executioner(command)
+
+        command = 'chmod 600:600 %s' % (finalPath)
+        ProcessUtilities.executioner(command)
+
+        logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Completed [200].' % (self.BackupPath))
+
+        return 1, self.BackupPath + '.tar.gz'
 
     ## Restore functions
 
@@ -1605,7 +1615,8 @@ def SubmitCloudBackupRestore(self):
            mysqlUtilities.mysqlUtilities.restoreDatabaseBackup(db['databaseName'], self.databasesPath, db['password'])
 
 
-
+        command = 'rm -rf %s' % (self.extractedPath)
+        ProcessUtilities.executioner(command)
 
         logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Completed [200].')
 

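CloudBackups() now always returns a (status, archive path) pair and locks down the finished .tar.gz before the caller uploads it. A sketch of that final step done with the standard library, assuming the archive should end up owned by cyberpanel with owner-only permissions (the same intent as the chown/chmod shell commands in the diff):

import os
import shutil

def finalize_archive(backup_path):
    """Restrict a finished backup archive to its owner and return its path.

    backup_path is the directory that was tarred; the archive itself is
    expected at backup_path + '.tar.gz', as in CloudBackups().
    """
    finalPath = '%s.tar.gz' % backup_path

    # Equivalent of `chown cyberpanel:cyberpanel <archive>` (Unix only).
    shutil.chown(finalPath, user='cyberpanel', group='cyberpanel')

    # Equivalent of `chmod 600 <archive>`: read/write for the owner only.
    os.chmod(finalPath, 0o600)

    return 1, finalPath
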
requirments.txt (+2, -2)

@@ -5,8 +5,8 @@ Babel==2.8.0
 backports.ssl-match-hostname==3.7.0.1
 bcrypt==3.2.0
 beautifulsoup4==4.9.3
-boto3==1.16.13
-botocore==1.19.13
+boto3==1.19.30
+botocore==1.19.30
 cachetools==4.1.1
 certifi==2020.11.8
 cffi==1.14.3

s3Backups/models.py (+1)

@@ -13,6 +13,7 @@ class BackupPlan(models.Model):
     retention = models.IntegerField()
     type = models.CharField(max_length=5, default='AWS')
     lastRun = models.CharField(max_length=50, default='0:0:0')
+    config = models.TextField()
 
 class WebsitesInPlan(models.Model):
     owner = models.ForeignKey(BackupPlan,on_delete=models.CASCADE)

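The new config TextField holds the plan's "what to backup" selection as a JSON blob with data, databases and emails flags; createPlan() and savePlanChanges() write it, and fetchBackupPlans() plus both forceRunAWSBackup() paths read it back. A minimal sketch of that round-trip, with a plain string standing in for a saved BackupPlan row:

import json

def build_plan_config(request_data):
    """Normalise the posted checkbox values into 0/1 flags, as createPlan() does."""
    config = {}
    for item in ('data', 'databases', 'emails'):
        try:
            config[item] = int(request_data[item])
        except (KeyError, TypeError, ValueError):
            config[item] = 0
    return json.dumps(config)  # the string stored in BackupPlan.config

# Reading it back, as fetchBackupPlans() and the backup runners do:
plan_config = json.loads(build_plan_config({'data': '1', 'emails': 1}))
assert plan_config == {'data': 1, 'databases': 0, 'emails': 1}
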
s3Backups/s3Backups.py (+74, -12)

@@ -176,8 +176,9 @@ def fetchAWSKeys(self):
 
         aws_access_key_id = data[1].split(' ')[2].strip(' ').strip('\n')
         aws_secret_access_key = data[2].split(' ')[2].strip(' ').strip('\n')
+        region = data[3].split(' ')[2].strip(' ').strip('\n')
 
-        return aws_access_key_id, aws_secret_access_key
+        return aws_access_key_id, aws_secret_access_key, region
 
     def fetchBuckets(self):
         try:
@@ -191,12 +192,13 @@ def fetchBuckets(self):
                return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')
 
 
-            aws_access_key_id, aws_secret_access_key = self.fetchAWSKeys()
+            aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys()
 
            s3 = boto3.resource(
                's3',
                aws_access_key_id = aws_access_key_id,
-                aws_secret_access_key = aws_secret_access_key
+                aws_secret_access_key = aws_secret_access_key,
+                region_name=region
            )
 
            json_data = "["
@@ -232,8 +234,28 @@ def createPlan(self):
 
            admin = Administrator.objects.get(pk=userID)
 
+            ## What to backup
+
+            WTB = {}
+            try:
+                WTB['data'] = int(self.data['data'])
+            except:
+                WTB['data'] = 0
+
+            try:
+                WTB['databases'] = int(self.data['databases'])
+            except:
+                WTB['databases'] = 0
+
+            try:
+                WTB['emails'] = int(self.data['emails'])
+            except:
+                WTB['emails'] = 0
+
+            ###
+
            newPlan = BackupPlan(owner=admin, name=self.data['planName'].replace(' ', ''), freq=self.data['frequency'],
-                                 retention=self.data['retenion'], bucket=self.data['bucketName'])
+                                 retention=self.data['retenion'], bucket=self.data['bucketName'], config=json.dumps(WTB))
            newPlan.save()
 
            for items in self.data['websitesInPlan']:
@@ -263,12 +285,16 @@ def fetchBackupPlans(self):
            checker = 0
 
            for plan in admin.backupplan_set.all():
+                config = json.loads(plan.config)
                dic = {
                    'name': plan.name,
                    'bucket': plan.bucket,
                    'freq': plan.freq,
                    'retention': plan.retention,
                    'lastRun': plan.lastRun,
+                    'data': config['data'],
+                    'databases': config['databases'],
+                    'emails': config['emails'],
                }
 
                if checker == 0:
@@ -374,9 +400,28 @@ def savePlanChanges(self):
 
            changePlan = BackupPlan.objects.get(name=self.data['planName'])
 
+            ## What to backup
+
+            WTB = {}
+            try:
+                WTB['data'] = int(self.data['data'])
+            except:
+                WTB['data'] = 0
+
+            try:
+                WTB['databases'] = int(self.data['databases'])
+            except:
+                WTB['databases'] = 0
+
+            try:
+                WTB['emails'] = int(self.data['emails'])
+            except:
+                WTB['emails'] = 0
+
            changePlan.bucket = self.data['bucketName']
            changePlan.freq = self.data['frequency']
            changePlan.retention = self.data['retention']
+            changePlan.config = json.dumps(WTB)
 
            changePlan.save()
 
@@ -478,15 +523,17 @@ def forceRunAWSBackup(self):
         try:
 
            plan = BackupPlan.objects.get(name=self.data['planName'])
+            logging.writeToFile(plan.config)
            bucketName = plan.bucket.strip('\n').strip(' ')
            runTime = time.strftime("%d:%m:%Y")
 
-            aws_access_key_id, aws_secret_access_key = self.fetchAWSKeys()
+            aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys()
 
            client = boto3.client(
                's3',
                aws_access_key_id = aws_access_key_id,
-                aws_secret_access_key = aws_secret_access_key
+                aws_secret_access_key = aws_secret_access_key,
+                #region_name=region
            )
 
 
@@ -533,25 +580,40 @@ def forceRunAWSBackup(self):
            BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                       msg='Starting backup process..').save()
 
+            PlanConfig = json.loads(plan.config)
+
            for items in plan.websitesinplan_set.all():
-                result = self.createBackup(items.domain)
-                if result[0]:
-                    key = plan.name + '/' + runTime + '/' + result[1].split('/')[-1] + ".tar.gz"
+
+                from plogical.backupUtilities import backupUtilities
+                tempStatusPath = "/home/cyberpanel/" + str(randint(1000, 9999))
+                extraArgs = {}
+                extraArgs['domain'] = items.domain
+                extraArgs['tempStatusPath'] = tempStatusPath
+                extraArgs['data'] = PlanConfig['data']
+                extraArgs['emails'] = PlanConfig['emails']
+                extraArgs['databases'] = PlanConfig['databases']
+                bu = backupUtilities(extraArgs)
+                result = bu.CloudBackups()
+
+                finalResult = open(tempStatusPath, 'r').read()
+
+                if result[0] == 1:
+                    key = plan.name + '/' + runTime + '/' + result[1]
                    client.upload_file(
-                        result[1] + ".tar.gz",
+                        result[1],
                        bucketName,
                        key,
                        Config=config,
                    )
 
-                    command = 'rm -f ' + result[1] + ".tar.gz"
+                    command = 'rm -f ' + result[1]
                    ProcessUtilities.executioner(command)
 
                    BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                               msg='Backup successful for ' + items.domain + '.').save()
                else:
                    BackupLogs(owner=plan, level='ERROR', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
-                               msg='Backup failed for ' + items.domain + '. Error: ' + result[1]).save()
+                               msg='Backup failed for ' + items.domain + '. Error: ' + finalResult).save()
 
            plan.lastRun = runTime
            plan.save()

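Both forceRunAWSBackup() implementations push each finished archive to S3 under a plan-name/run-date prefix using boto3's managed transfer. A sketch of just the upload step; the explicit credentials and key prefix mirror the diff, while the TransferConfig thresholds and the use of the archive's base name in the key are illustrative choices:

import os
import time

import boto3
from boto3.s3.transfer import TransferConfig

def upload_backup(archive_path, bucket_name, plan_name,
                  aws_access_key_id, aws_secret_access_key, region=None):
    """Upload one backup archive to s3://<bucket>/<plan>/<dd:mm:YYYY>/<file name>."""
    client = boto3.client(
        's3',
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        region_name=region,  # the commit leaves region_name commented out
    )

    runTime = time.strftime("%d:%m:%Y")
    key = plan_name + '/' + runTime + '/' + os.path.basename(archive_path)

    # Managed, multipart-capable upload; the thresholds are placeholders.
    config = TransferConfig(multipart_threshold=25 * 1024 * 1024, max_concurrency=10)
    client.upload_file(archive_path, bucket_name, key, Config=config)
    return key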