refactored to allow arbitrary installation directory on node; removed all 'sudo' requirements; change to user based crontab scheduling
1 parent 3cbc94b commit 9dea4183db7724af08419ea0971378fb3fea06a2 Adam Bachman committed Feb 27, 2009
Showing with 347 additions and 244 deletions.
  1. +20 −4 Capfile
  2. +20 −18 audit/generate_feeds.rb
  3. +1 −0 config/sample-node.yml
  4. +60 −36 dist/backup-runner.rb
  5. +101 −74 dist/install.sh
  6. +1 −1 dist/mysql-dump.sh
  7. +1 −1 dist/tar-dump.sh
  8. +28 −28 dist/uninstall.sh
  9. +24 −5 lib/confighandler.rb
  10. +45 −15 lib/connection_tasks.rb
  11. +33 −50 lib/node_tasks.rb
  12. +2 −3 lib/server_info_tasks.rb
  13. +11 −9 readme.md
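
The commit message above mentions moving from sudo-based installation to scheduling in the invoking user's own crontab. As a hedged sketch only (the actual entry is written by dist/install.sh, whose full diff is not shown here), the per-user cron line might be assembled like this; the path and invocation are assumptions, not code from the commit:

# Hypothetical illustration of the user-level cron entry; hour/minute
# come from the Capfile prompt, installdir from the node config.
hour, minute, installdir = "03", "17", "/home/adam/backup-toolkit"
cron_line = "#{minute} #{hour} * * * ruby #{installdir}/dist/backup-runner.rb"
# typically appended with: (crontab -l; echo "$cron_line") | crontab -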
Capfile
@@ -28,7 +28,7 @@ after "dist:uninstall", "dist:cleanup"
namespace :dist do
task :send_pkg, :roles => :node do
`tar zcvf dist.tar.gz dist/`
- upload 'dist.tar.gz', "/home/#{node_server['username']}/dist.tar.gz"
+ upload 'dist.tar.gz', "dist.tar.gz"
run "tar xzvf dist.tar.gz"
end
@@ -42,18 +42,34 @@ namespace :dist do
task :install, :roles => :node do
r_h = sprintf "%02i", (rand(4) + 1)
r_m = sprintf "%02i", rand(60)
+ unless node_server['install_directory']
+ installdir = Capistrano::CLI.ui.ask("what is the remote install directory? ") {|q| q.default = '~/backup-toolkit'}
+ else
+ installdir = node_server['install_directory']
+ end
run_time = Capistrano::CLI.ui.ask("what time would you like to run backups (hh:mm)? [#{ r_h }:#{ r_m }] ")
run_time = (run_time.chomp.empty? || run_time.count(':') != 1) ? "#{r_h}:#{r_m}" : run_time.chomp
- sudo "dist/install.sh #{node_server['username']} #{ run_time.split(':')[0] } #{ run_time.split(':')[1] }"
- run "cat /home/#{node_server['username']}/.backup-log/install.log"
+ run "dist/install.sh -h#{ run_time.split(':')[0] } -m#{ run_time.split(':')[1] } #{ installdir }"
end
desc "uninstall backup-toolkit on node"
task :uninstall, :roles => :node do
- sudo "dist/uninstall.sh #{node_server['username']}"
+ run "dist/uninstall.sh #{ node_server['install_directory'] }"
end
end
+desc "deploy backup-toolkit to a remote server"
+task :deploy do
+ conf = Capistrano::CLI.ui.ask("create new node connection? {Y|n}") {|q| q.default = "yes"} .downcase
+ connection.node.create if /[Yy]/ =~ conf
+ conf = Capistrano::CLI.ui.ask("create new backup connection? {y|N}") {|q| q.default = "no"} .downcase
+ connection.backup.create if /[Yy]/ =~ conf
+ keys.sync.default
+ dist.install
+ node.create_jobs
+ node.jobs
+end
+
task :invoke do
end
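
For clarity, the run_time fallback in the dist:install task above behaves as follows. This is a standalone restatement, not code from the commit:

# An empty answer, or anything without exactly one ':', falls back to
# the random default built from r_h and r_m.
def resolve_run_time(answer, default)
  answer = answer.chomp
  (answer.empty? || answer.count(':') != 1) ? default : answer
end

resolve_run_time("",        "03:17")  # => "03:17" (empty, use default)
resolve_run_time("2:30",    "03:17")  # => "2:30"  (one colon, accepted)
resolve_run_time("2:30:00", "03:17")  # => "03:17" (two colons, rejected)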
audit/generate_feeds.rb
@@ -1,5 +1,5 @@
# Should be deployed with:
-# config/config-repo.yml
+# config/connections-repo.yml
# lib/confighandler.rb
#
@@ -52,13 +52,15 @@ def get_jobs_on_node conf
Net::SSH.start(conf['hostname'], conf['username'], :auth_methods => ['publickey']) do |ssh|
local_hostname = ssh.exec!("hostname").chomp
# get backup dir
- backup_dir = ssh.exec!("cat /etc/backup-toolkit.conf | grep conf | awk '{ print $2 }'").chomp
- config_filenames = ssh.exec!("ls #{backup_dir}").split()
- for c in config_filenames
- config_dump = ssh.exec!("cat #{backup_dir}/#{c}")
- config_dump << "\n local_hostname: #{ local_hostname }"
- config_dump << "\n config_filename: #{ c }"
- conf = ConfigHandler::load_yaml( config_dump )
+ remote_master_config =
+ ConfigHandler::load_yaml(ssh.exec!("cat #{ conf['install_directory'] }/backup-toolkit.conf").chomp)
+ jobs_directory = remote_master_config['jobs_directory']
+ job_filenames = ssh.exec!("ls #{jobs_directory}").split()
+ for c in job_filenames
+ job_dump = ssh.exec!("cat #{jobs_directory}/#{c}")
+ job_dump << "\n local_hostname: #{ local_hostname }"
+ job_dump << "\n job_filename: #{ c }"
+ conf = ConfigHandler::load_yaml( job_dump )
# {}.shift
# {"a"=>{"b"=>1, "c"=>2}} becomes ["a", {"b"=>1, "c"=>2}]
type, conf = conf.shift
@@ -114,10 +116,10 @@ def generate_feeds_for_node node
jobs.each do |job|
files = find_files_on_backup job
content = RSS::Maker.make("2.0") do |m|
- m.channel.title = "backup-toolkit audit feed for node: #{ node['username'] }@#{ node['hostname'] }, job: #{ job['config_filename'] }"
+ m.channel.title = "backup-toolkit audit feed for node: #{ node['username'] }@#{ node['hostname'] }, job: #{ job['job_filename'] }"
m.channel.link = "http://slsdev.net"
m.channel.description = "node id: #{ node['id'] }; username: #{ node['username'] }; "\
- "hostname: #{ node['hostname'] }; job name: #{ job['config_filename'] }; "\
+ "hostname: #{ node['hostname'] }; job name: #{ job['job_filename'] }; "\
"backing up to: #{ job['backup_username'] }@#{ job['backup_hostname'] }:"\
"~/#{ job['backup_destination'] }"
m.items.do_sort = true
@@ -128,12 +130,12 @@ def generate_feeds_for_node node
i.description = "#{ f[:name] }: #{ f[:attributes].size } bytes"
i.date = Time.at(f[:attributes].mtime)
i.guid.content = "#{ node['username'] }-#{ node['hostname'] }-"\
- "#{ job['local_hostname'] }-#{ job['config_filename'] }-"\
+ "#{ job['local_hostname'] }-#{ job['job_filename'] }-"\
"#{ f[:name] }"
i.guid.isPermaLink = false
end
end
- feed_filename = "#{ node['username'] }-#{ node['hostname'] }-#{ job['local_hostname'] }-#{ job['config_filename'] }.xml"
+ feed_filename = "#{ node['username'] }-#{ node['hostname'] }-#{ job['local_hostname'] }-#{ job['job_filename'] }.xml"
feeds << {
:filename => feed_filename,
:content => content.to_s,
@@ -159,15 +161,15 @@ def run()
i.guid.isPermaLink = false
visited_nodes = {}
- ConfigHandler::all_nodes.each do |config|
- next if visited_nodes["#{ config['username'] }@#{ config['hostname'] }"]
- visited_nodes["#{ config['username'] }@#{ config['hostname'] }"] = true
+ ConfigHandler::all_nodes.each do |node|
+ next if visited_nodes["#{ node['username'] }@#{ node['hostname'] }"]
+ visited_nodes["#{ node['username'] }@#{ node['hostname'] }"] = true
- feeds = generate_feeds_for_node config
- puts "using #{ config.inspect }"
+ feeds = generate_feeds_for_node node
+ puts "using #{ node.inspect }"
for feed in feeds
# Add this feed's info to snapshot
- job_title = "#{feed[:job]['config_filename']} on #{feed[:job]['local_hostname']}"
+ job_title = "#{feed[:job]['job_filename']} on #{feed[:job]['local_hostname']}"
if Time.at(feed[:files].first()[:attributes].mtime) > one_day_ago
status_message = "up to date."
else
config/sample-node.yml
@@ -4,3 +4,4 @@ type: node
id: ubuntu-general-VM
hostname: 192.168.1.31
username: adam
+install_directory: /home/adam/backup-toolkit
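
With install_directory in the node config, get_jobs_on_node reads the master config from #{install_directory}/backup-toolkit.conf rather than /etc/backup-toolkit.conf. A hypothetical example of that file, inferred from the keys the scripts read (jobs_directory, staging_directory, logging_directory, bin_directory, local_hostname); the paths shown are illustrative only:

require 'yaml'

master = YAML.load(<<-CONF)
jobs_directory: /home/adam/backup-toolkit/backup-jobs
staging_directory: /home/adam/backup-toolkit/backup-staging
logging_directory: /home/adam/backup-toolkit/backup-log
bin_directory: /home/adam/backup-toolkit/bin
local_hostname: ubuntu-general-VM
CONF

master['jobs_directory']  # => "/home/adam/backup-toolkit/backup-jobs"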
dist/backup-runner.rb
@@ -2,36 +2,40 @@
#
# Get backup settings files and run all scheduled backups.
#
-# Expects directories on localhost:
-# ~/.backup-config
-# ~/.backup-staging
-# ~/.backup-log
+# Expects only the master config file: backup-toolkit.conf
#
+# This script generates and executes command line commands based on
+# the config.backup files contained in $install_directory/backup-jobs
+#
+
require 'yaml'
require 'fileutils'
require 'logger'
# how many times should we try to send the file?
MAX_RETRY = 3
-# who are we running as?
-begin
- MASTER_CONFIG = File.open("/etc/backup-toolkit.conf") { |yf| YAML::load( yf ) }
-rescue
- `echo "[$(date)] FAILED TO START, NO MASTER CONFIG FILE" >> /tmp/fail.log`
+# find master config
+found = false
+for filename in %w( backup-toolkit.conf ../backup-toolkit.conf ../backup-config/backup-toolkit.conf ../config/backup-toolkit.conf )
+ puts "looking for config in #{ File.join(File.dirname(__FILE__), filename) }"
+ filename = File.join(File.dirname(__FILE__), filename)
+ if File.exist? filename
+ MASTER_CONFIG = File.open(filename) { |yf| YAML::load( yf ) }
+ found = true
+ break
+ end
+end
+if not found
puts "Failed to find config file! ABORT"
- raise
+ raise "FAILED TO FIND CONFIG FILE"
end
-BACKUP_SETTINGS_DIR = Dir.new(MASTER_CONFIG['config_directory'])
+BACKUP_JOBS_DIR = Dir.new(MASTER_CONFIG['jobs_directory'])
BACKUP_STAGING_DIR = Dir.new(MASTER_CONFIG['staging_directory'])
BACKUP_LOGGING_DIR = Dir.new(MASTER_CONFIG['logging_directory'])
+BACKUP_BIN_DIR = Dir.new(MASTER_CONFIG['bin_directory'])
HOSTNAME = MASTER_CONFIG['local_hostname'] || `hostname`.chomp
-BACKUP_SETTINGS = []
-BACKUP_SETTINGS_DIR.each do |file|
- next unless /^.*\.backup$/ =~ file
- BACKUP_SETTINGS << File.open(File.join(BACKUP_SETTINGS_DIR.path, file)) { |f| YAML::load( f ) }
-end
# Setup logger
log_file = File.open(File.join(BACKUP_LOGGING_DIR.path, "run.log"), 'a')
@@ -40,41 +44,61 @@
log.info("starting backup of #{ HOSTNAME }")
+BACKUP_SETTINGS = []
+BACKUP_JOBS_DIR.each do |file|
+ next unless /^.*\.backup$/ =~ file
+ log.debug("loading file: #{ file }")
+ job = File.open(File.join(BACKUP_JOBS_DIR.path, file)) { |f| YAML::load( f ) }
+ # job is {type => { setting => 'value', setting_two => 'value' }}
+ # so we turn it into { type => 'type', setting => 'value', setting_two => 'value' }
+ job['type'] = job.clone().shift().first
+ job[job['type']].keys.each do |k|
+ job[k] = job[job['type']][k]
+ end
+ job['file'] = file
+ BACKUP_SETTINGS << job
+end
+
# GENERATE BACKUP FILES
for config in BACKUP_SETTINGS
# do all backups.
- temp = File.join(BACKUP_STAGING_DIR.path, '.out')
- if config['directory']
- s = config['directory']
- log.debug("doing directory backup of #{s['path']}")
- output = `tar-dump -d#{BACKUP_STAGING_DIR.path} #{s['path']}`
- elsif config['mysql']
- s = config['mysql']
- log.debug("doing mysql backup of #{s["database"]}")
- output = `mysql-dump -v -u#{s['username']} -p#{s['password']} -t#{BACKUP_STAGING_DIR.path} #{s['database']}`
+ case config['type']
+ when 'directory'
+ log.debug("doing directory backup of #{config['path']}")
+ output = `#{BACKUP_BIN_DIR.path}/tar-dump.sh -d#{BACKUP_STAGING_DIR.path} #{config['path']}`
+ when 'mysql'
+ log.debug("doing mysql backup of #{config["database"]}")
+ output = `#{BACKUP_BIN_DIR.path}/mysql-dump.sh -v -u#{config['username']} -p#{config['password']} -t#{BACKUP_STAGING_DIR.path} #{config['database']}`
+ else
+ puts "FAILURE! UNKNOWN BACKUP TYPE!" # FAIL
+ next
end
-
+
# Store generated values in config hash
# last line of output should return created filename.
local_filename = output.split.last
- s['local_filename'] = local_filename
- s['count'] = 0
+ config['local_filename'] = local_filename
+ config['count'] = 0
end
log.info("sending staged backups")
for config in BACKUP_SETTINGS
- s = config['directory'] || config['mysql']
- local_fullfilename = s['local_filename']
+ type = config['type']
+ unless config['local_filename'] # FAIL
+ log.error("backup file creation failed for job: #{ config['file'] }")
+ next
+ end
+ local_fullfilename = config['local_filename']
local_filename = File.split(local_fullfilename).last
- log.debug("sending #{local_filename} to #{s["backup_hostname"]}:#{s["backup_destination"]}")
+ log.debug("sending #{local_filename} to #{config["backup_hostname"]}:#{config["backup_destination"]}")
begin
# Copy file from local to remote, renaming to prepend the sender (this node's hostname).
- `nice scp #{local_fullfilename} #{s["backup_username"]}@#{s["backup_hostname"]}:#{s["backup_destination"]}/#{ HOSTNAME }-#{local_filename}`
- log.info "#{local_filename} sent to #{s["backup_hostname"]}:#{s["backup_destination"]}/#{ HOSTNAME }-#{local_filename}"
+ `nice scp #{local_fullfilename} #{config["backup_username"]}@#{config["backup_hostname"]}:#{config["backup_destination"]}/#{ HOSTNAME }-#{local_filename}`
+ log.info "#{local_filename} sent to #{config["backup_hostname"]}:#{config["backup_destination"]}/#{ HOSTNAME }-#{local_filename}"
rescue
- log.error("ERROR SENDING #{ local_filename } to #{ s["backup_hostname"] }:#{ s["backup_destination"] }")
- if s['count'] < MAX_RETRY
- s['count'] = s['count'] + 1
+ log.error("ERROR SENDING #{ local_filename } to #{ config["backup_hostname"] }:#{ config["backup_destination"] }")
+ if config['count'] < MAX_RETRY
+ config['count'] = config['count'] + 1
BACKUP_SETTINGS << config
end
end
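
To make the job-loading hunk above concrete: each *.backup file is shaped {type => settings}, and the runner flattens it so the type name and its settings sit at the top level of the hash. A worked example; the field names match those the runner reads, but the values are invented:

require 'yaml'

job = YAML.load(<<-JOB)
mysql:
  database: app_db
  username: dbuser
  password: secret
  backup_username: backup
  backup_hostname: backup.example.com
  backup_destination: backups/app
JOB

job['type'] = job.clone.shift.first   # => "mysql" (the single top-level key)
job[job['type']].keys.each { |k| job[k] = job[job['type']][k] }
job['database']   # => "app_db" -- settings now reachable directly

Note also the retry design at the end of the runner: a failed scp re-appends the job to BACKUP_SETTINGS with an incremented count, so each job gets up to MAX_RETRY additional send attempts within the same run.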