Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
restore s3 backups
  • Loading branch information
usmannasir committed Dec 11, 2020
1 parent f0c1dfb commit a9820ac
Show file tree
Hide file tree
Showing 5 changed files with 401 additions and 111 deletions.
103 changes: 103 additions & 0 deletions cloudAPI/cloudManager.py
Expand Up @@ -1791,3 +1791,106 @@ def SubmitCloudBackupRestore(self):

except BaseException as msg:
return self.ajaxPre(0, str(msg))

def fetchAWSKeys(self):
    """Read AWS credentials from /home/cyberpanel/.aws/credentials.

    Returns:
        tuple[str, str, str]: (aws_access_key_id, aws_secret_access_key, region).

    Raises:
        IOError/OSError: if the credentials file is missing or unreadable.
        IndexError: if the file does not have the expected layout.

    NOTE(review): parsing is positional — it assumes the file looks exactly like
        [default]
        aws_access_key_id = XXX
        aws_secret_access_key = YYY
        region = ZZZ
    with single spaces around '='. A configparser-based read would be more
    robust, but the positional contract is kept here to match existing files.
    """
    path = '/home/cyberpanel/.aws'
    credentials = path + '/credentials'

    # Use a context manager so the file handle is closed even if parsing fails
    # (the original left the handle open).
    with open(credentials, 'r') as credsFile:
        data = credsFile.readlines()

    # Each line is 'key = value\n'; token index 2 is the value.
    aws_access_key_id = data[1].split(' ')[2].strip(' ').strip('\n')
    aws_secret_access_key = data[2].split(' ')[2].strip(' ').strip('\n')
    region = data[3].split(' ')[2].strip(' ').strip('\n')

    return aws_access_key_id, aws_secret_access_key, region

def getCurrentS3Backups(self):
    """List backup objects stored under <planName>/<domainName>/ in the plan's
    S3 bucket and return them as a JSON HttpResponse.

    On success the payload is {'status': 1, 'fetchStatus': 1,
    'error_message': "None", 'data': <JSON array string>} where each array
    entry is {'id', 'file', 'size'}. On any failure a
    {'status': 0, 'fetchStatus': 0, 'error_message': ...} payload is returned.
    """
    try:

        import boto3
        from s3Backups.models import BackupPlan, BackupLogs
        plan = BackupPlan.objects.get(name=self.data['planName'])

        aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys()

        s3 = boto3.resource(
            's3',
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key
        )

        # Objects for this plan/domain live under '<plan>/<domain>/'.
        prefix = '%s/%s/' % (plan.name, self.data['domainName'])
        matched = s3.Bucket(plan.bucket).objects.filter(Prefix=prefix)

        # Serialize each entry individually and join with a bare comma so the
        # resulting string matches the frontend's expected format exactly.
        entries = [
            json.dumps({'id': position, 'file': obj.key, 'size': obj.size})
            for position, obj in enumerate(matched, start=1)
        ]
        json_data = '[' + ','.join(entries) + ']'

        final_json = json.dumps({'status': 1, 'fetchStatus': 1, 'error_message': "None", "data": json_data})
        return HttpResponse(final_json)
    except BaseException as msg:
        final_dic = {'status': 0, 'fetchStatus': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)

def deleteS3Backup(self):
    """Delete a single backup object (self.data['backupFile']) from the S3
    bucket configured on the named backup plan.

    Returns a JSON HttpResponse: {'status': 1, 'fetchStatus': 1,
    'error_message': "None"} on success, or a status-0 payload carrying the
    exception text on any failure.
    """
    try:

        import boto3
        from s3Backups.models import BackupPlan, BackupLogs
        backupPlan = BackupPlan.objects.get(name=self.data['planName'])

        aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys()

        s3Resource = boto3.resource(
            's3',
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key
        )

        # Object keys arrive fully qualified from the listing endpoint, so the
        # delete targets the exact key the caller selected.
        s3Resource.Object(backupPlan.bucket, self.data['backupFile']).delete()

        return HttpResponse(json.dumps({'status': 1, 'fetchStatus': 1, 'error_message': "None"}))
    except BaseException as msg:
        errorPayload = {'status': 0, 'fetchStatus': 0, 'error_message': str(msg)}
        return HttpResponse(json.dumps(errorPayload))

def SubmitS3BackupRestore(self):
    """Kick off an asynchronous S3 backup restore via backupUtilities.py.

    Writes an initial status marker to a random temp file, launches the
    restore as a detached process, and immediately returns the temp file
    path so the frontend can poll restore progress.

    Returns:
        HttpResponse with {'status': 1, 'tempStatusPath': <path>} on launch,
        or the standard ajaxPre error payload if anything raises.
    """
    try:

        # Random 4-digit suffix names the status file polled by the UI.
        tempStatusPath = "/home/cyberpanel/" + str(randint(1000, 9999))

        writeToFile = open(tempStatusPath, 'w')
        writeToFile.write('Starting..,0')
        writeToFile.close()

        # Build the CLI invocation for the detached restore worker.
        # NOTE(review): self.data values come from the request and are
        # interpolated into a shell command; only backupFile is quoted and
        # none are escaped — confirm upstream validation of domain/planName
        # or escape with shlex.quote.
        execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/backupUtilities.py"
        execPath = execPath + " SubmitS3BackupRestore --backupDomain %s --backupFile '%s' --tempStoragePath %s --planName %s" % (self.data['domain'], self.data['backupFile'], tempStatusPath, self.data['planName'])
        # Fire-and-forget: the worker updates tempStatusPath as it runs.
        ProcessUtilities.popenExecutioner(execPath)

        final_dic = {'status': 1, 'tempStatusPath': tempStatusPath}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)

    except BaseException as msg:
        return self.ajaxPre(0, str(msg))
6 changes: 6 additions & 0 deletions cloudAPI/views.py
Expand Up @@ -63,6 +63,12 @@ def router(request):
return cm.deleteCloudBackup()
elif controller == 'SubmitCloudBackupRestore':
return cm.SubmitCloudBackupRestore()
elif controller == 'getCurrentS3Backups':
return cm.getCurrentS3Backups()
elif controller == 'deleteS3Backup':
return cm.deleteS3Backup()
elif controller == 'SubmitS3BackupRestore':
return cm.SubmitS3BackupRestore()
elif controller == 'fetchWebsites':
return cm.fetchWebsites()
elif controller == 'fetchWebsiteDataJSON':
Expand Down
45 changes: 23 additions & 22 deletions plogical/IncScheduler.py
Expand Up @@ -611,15 +611,37 @@ def fetchAWSKeys():
def forceRunAWSBackup(planName):
try:


plan = BackupPlan.objects.get(name=planName)
bucketName = plan.bucket.strip('\n').strip(' ')
runTime = time.strftime("%d:%m:%Y")

config = TransferConfig(multipart_threshold=1024 * 25, max_concurrency=10,
multipart_chunksize=1024 * 25, use_threads=True)

##

aws_access_key_id, aws_secret_access_key, region = IncScheduler.fetchAWSKeys()

ts = time.time()
retentionSeconds = 86400 * plan.retention
s3 = boto3.resource(
's3',
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key
)
bucket = s3.Bucket(plan.bucket)

for file in bucket.objects.all():
result = float(ts - file.last_modified.timestamp())
if result > retentionSeconds:
BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
msg='File %s expired and deleted according to your retention settings.' % (
file.key)).save()
file.delete()

###

client = boto3.client(
's3',
aws_access_key_id = aws_access_key_id,
Expand All @@ -629,7 +651,6 @@ def forceRunAWSBackup(planName):

##


BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
msg='Starting backup process..').save()

Expand All @@ -652,7 +673,7 @@ def forceRunAWSBackup(planName):
finalResult = open(tempStatusPath, 'r').read()

if result == 1:
key = plan.name + '/' + runTime + '/' + fileName.split('/')[-1]
key = plan.name + '/' + items.domain + '/' + fileName.split('/')[-1]
client.upload_file(
fileName,
bucketName,
Expand All @@ -675,26 +696,6 @@ def forceRunAWSBackup(planName):
BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
msg='Backup Process Finished.').save()

###

s3 = boto3.resource(
's3',
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
region_name=region
)

ts = time.time()

retentionSeconds = 86400 * plan.retention

for bucket in s3.buckets.all():
if bucket.name == plan.bucket:
for file in bucket.objects.all():
result = float(ts - file.last_modified.timestamp())
if result > retentionSeconds:
file.delete()
break

except BaseException as msg:
logging.writeToFile(str(msg) + ' [S3Backups.runBackupPlan]')
Expand Down

0 comments on commit a9820ac

Please sign in to comment.