Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
branch: master
Fetching contributors…

Cannot retrieve contributors at this time

224 lines (185 sloc) 8.201 kb
#!/usr/bin/python
#import md5
from hashlib import md5
import os
import sys
import time
import zipfile
from ftplib import FTP, all_errors
"""
TODO: diff for mysql dump
Notes
- root directory incremental backup includes an archive of *only* the files that have changed <-- can be changed
- directory incremental backups include an archive of the entire directory <-- can be changed
- Both things that I've mentioned so far are just to give you an example of how this could work as well as serve somewhat of a purpose... I'll gladly add behavior
if this does not suit your needs.
- deleting the hashmap file on the remote server will cause a full backup to occur, the hash map is used to determine what has changed and what is new.
- each backup is stored in its own unique directory named according to the following convention: mm-dd-yyyy-hh-mm-ss. This convention can be changed however please note that
the script will not overwrite existing archives if they exist.
- remoteWorkingDirectory should be a fully qualified path
- FTP host should be a remote host and not localhost :)
- This script can be easily changed to split zip files or tar files
- this script uses, the md5sum, diff, and tar command line utilities via os.system() and redirection of stdout to files. It seems to clean up after itself.
Questions/Bugs? erratic@devel.ws (Paige Adele Thompson)
"""
class FTPClient:
    """Thin FTP helper: connects, manages remote directories, and uploads
    backup archives produced by BackupScript."""

    # -- user editable variables --
    username = "adele"
    password = ""
    host = "localhost"
    remoteWorkingDirectory = "/home/adele"  # must be a fully qualified path
    # Timestamped target directory, e.g. "ServerBackup-2015-3-7-14-5-9/".
    # Unpadded %d fields match the original naming convention exactly.
    remoteBackupStoreDirectory = "ServerBackup-%d-%d-%d-%d-%d-%d/" % time.localtime()[:6]

    # -- app variables --
    ftp = None              # ftplib.FTP connection, set by OpenConnection()
    HashMapExists = False   # True once HashMap.md5 was fetched from the server

    def GetHashFile(self):
        """Download the remote HashMap.md5 into local OldHashMap.md5.

        A missing remote hash map is not an error: it simply leaves
        HashMapExists False, which makes the caller run a full backup.
        """
        if os.path.exists("OldHashMap.md5"):
            os.remove("OldHashMap.md5")
        # with-block guarantees the file is closed (the original leaked it)
        with open("OldHashMap.md5", "w") as outfile:
            try:
                # retrlines strips line endings; re-add them when writing
                self.ftp.retrlines("RETR HashMap.md5",
                                   lambda s: outfile.write(s + "\n"))
                self.HashMapExists = True
            except all_errors:
                # first run / hash map deleted on server -> full backup
                return

    def ChangeToWorkingDirectory(self):
        """cwd into remoteWorkingDirectory, creating it when missing."""
        try:
            self.ftp.cwd(self.remoteWorkingDirectory)
        except all_errors:
            self.ftp.mkd(self.remoteWorkingDirectory)
            self.ftp.cwd(self.remoteWorkingDirectory)

    def ChangeToBackupStoreDirectory(self):
        """cwd into the timestamped backup directory, creating it when missing."""
        try:
            self.ftp.cwd(self.remoteBackupStoreDirectory)
        except all_errors:
            self.ftp.mkd(self.remoteBackupStoreDirectory)
            self.ftp.cwd(self.remoteBackupStoreDirectory)

    def OpenConnection(self):
        """Connect + log in, then move to the remote working directory."""
        self.ftp = FTP(self.host, self.username, self.password)
        self.ftp.cwd(self.remoteWorkingDirectory)

    def UploadFile(self, Filename, _retries=3):
        """Upload Filename with STOR, reconnecting and retrying on FTP errors.

        _retries bounds the reconnect attempts; the original recursed
        without limit, looping forever on any persistent failure.
        """
        try:
            # with-block closes the local file (the original leaked it)
            with open(Filename, "rb") as source:
                self.ftp.storbinary("STOR " + Filename, source, 1024)
        except all_errors:
            if _retries <= 0:
                raise
            self.OpenConnection()
            self.UploadFile(Filename, _retries - 1)
class BackupScript:
    """Incremental backup driver: md5-hash-map diffs decide which content
    changed; changed content and a MySQL dump are zipped and FTP'd."""

    # -- user editable variables --
    mysqlUser = "root"
    mysqlPassword = ""
    mysqlServer = "localhost"
    contentRoot = "public_html/"

    def __init__(self):
        # app variables; per-instance so state is not shared between
        # instances (the original used mutable class attributes) and the
        # FTP client is no longer created at class-definition time
        self.ftpClient = FTPClient()
        self.HashMapDelta = None              # list of changed paths, or None
        self.BackedUpDeltaDirectories = dict()  # paths already backed up

    def DeleteFile(self, Filename):
        """Remove a local temporary file or archive."""
        os.remove(Filename)

    def BackupDatabases(self):
        """Dump all MySQL databases, zip the dump, upload it, clean up.

        NOTE(review): the password on the mysqldump command line is visible
        in `ps` output; a ~/.my.cnf credentials file would be safer.
        """
        os.system("mysqldump -u " + self.mysqlUser + " --password=\"" +
                  self.mysqlPassword + "\" --all-databases > mysqlTemp.sql")
        now = time.localtime()
        SqlZipFile = "mysql-%d-%d-%d.zip" % (now.tm_year, now.tm_mon, now.tm_mday)
        # with-block guarantees the archive is finalized and closed
        with zipfile.ZipFile(SqlZipFile, "w") as archive:
            archive.write("mysqlTemp.sql", os.path.basename("mysqlTemp.sql"),
                          zipfile.ZIP_DEFLATED)
        self.DeleteFile("mysqlTemp.sql")
        self.ftpClient.UploadFile(SqlZipFile)
        self.DeleteFile(SqlZipFile)

    def BackupDirectory(self, Directory):
        """tar the whole content subdirectory, zip the tar, upload the zip."""
        now = time.localtime()
        DirectoryArchiveName = "%s-%d-%d-%d." % (Directory, now.tm_year,
                                                 now.tm_mon, now.tm_mday)
        os.system("tar -cvf " + DirectoryArchiveName + "tar " +
                  self.contentRoot + Directory)
        with zipfile.ZipFile(DirectoryArchiveName + "zip", "w") as archive:
            archive.write(DirectoryArchiveName + "tar",
                          DirectoryArchiveName + "tar", zipfile.ZIP_DEFLATED)
        self.DeleteFile(DirectoryArchiveName + "tar")
        self.ftpClient.UploadFile(DirectoryArchiveName + "zip")
        self.DeleteFile(DirectoryArchiveName + "zip")

    def IsDeltaPath(self, Path):
        """Return True when Path appears in the hash-map delta and has not
        been backed up yet, or (for a directory) when any delta line lives
        under Path's top-level "root/sub" prefix."""
        for changed in self.HashMapDelta:
            # `in` replaces the Python-2-only dict.has_key()
            if Path == changed and Path not in self.BackedUpDeltaDirectories:
                return True
            if os.path.isdir(Path):
                parts = changed.split("/")
                # length guard: a delta line without "/" crashed the original
                if len(parts) >= 2 and Path == parts[0] + "/" + parts[1]:
                    return True
        return False

    def BackupContent(self):
        """Back up content: each subdirectory as its own archive, changed
        root-level files together in one rootDirectory zip."""
        now = time.localtime()
        RootDirectoryArchiveName = "rootDirectory-%d-%d-%d.zip" % (
            now.tm_year, now.tm_mon, now.tm_mday)
        RootDirectoryFiles = None   # lazily created: only if a file qualifies
        for entity in os.listdir(self.contentRoot):
            path = self.contentRoot + entity
            # incremental run: skip anything not in the delta
            if self.HashMapDelta is not None and not self.IsDeltaPath(path):
                continue
            if os.path.isdir(path):
                self.BackupDirectory(entity)
                if self.HashMapDelta is not None:
                    self.BackedUpDeltaDirectories[path] = True
            else:
                if RootDirectoryFiles is None:
                    RootDirectoryFiles = zipfile.ZipFile(RootDirectoryArchiveName, "w")
                RootDirectoryFiles.write(path, path, zipfile.ZIP_DEFLATED)
                self.BackedUpDeltaDirectories[path] = True
        if RootDirectoryFiles is not None:
            RootDirectoryFiles.close()
            self.ftpClient.UploadFile(RootDirectoryArchiveName)
            self.DeleteFile(RootDirectoryArchiveName)

    def GenerateContentHashMap(self, Directory):
        """Recursively append `md5sum` lines for every file under Directory
        to the local HashMap.md5."""
        for entity in os.listdir(Directory):
            full = Directory + "/" + entity
            if os.path.isdir(full):
                self.GenerateContentHashMap(full)
            else:
                # NOTE(review): a filename containing '"' breaks this quoting
                os.system("md5sum \"" + full + "\" >> HashMap.md5")

    def CompareHashFiles(self):
        """diff the old and new hash maps; collect changed paths into
        HashMapDelta (None when nothing changed)."""
        self.HashMapDelta = list()
        if os.path.exists("HashMapDiff.md5"):
            self.DeleteFile("HashMapDiff.md5")
        os.system("diff OldHashMap.md5 HashMap.md5 > HashMapDiff.md5")
        with open("HashMapDiff.md5") as diff_file:
            for line in diff_file:
                # md5sum lines look like "> <hash>  <path>": the DOUBLE space
                # yields an empty field, so the path sits at index 3. Hunk
                # headers ("3c3") and "---" separators have too few fields.
                # NOTE(review): paths containing spaces get truncated here.
                fields = line.split(" ")
                if len(fields) >= 4:
                    self.HashMapDelta.append(fields[3].strip("\n"))
        if len(self.HashMapDelta) <= 0:
            self.HashMapDelta = None

    def RunContentBackup(self):
        """Hash local content, fetch the previous hash map, and upload the
        changed content (everything on the first run)."""
        if os.path.exists("HashMap.md5"):
            self.DeleteFile("HashMap.md5")
        self.GenerateContentHashMap(self.contentRoot.strip("/"))
        self.ftpClient.OpenConnection()
        self.ftpClient.GetHashFile()
        self.ftpClient.ChangeToBackupStoreDirectory()
        if self.ftpClient.HashMapExists:
            self.CompareHashFiles()
            if self.HashMapDelta is None:
                # nothing changed since the last backup
                self.ftpClient.ChangeToWorkingDirectory()
                return
        self.BackupContent()
        self.ftpClient.ChangeToWorkingDirectory()
        self.ftpClient.UploadFile("HashMap.md5")

    def RunDatabaseBackup(self):
        """Upload a zipped dump of all databases into the backup directory."""
        self.ftpClient.ChangeToBackupStoreDirectory()
        self.BackupDatabases()
# Entry point: guard so importing this module does not trigger a backup run.
if __name__ == "__main__":
    backup = BackupScript()
    backup.RunContentBackup()
    backup.RunDatabaseBackup()
Jump to Line
Something went wrong with that request. Please try again.