Merge branch 'develop'

commit db86fec8bebcd0136ee241016e7b7eda72413808 (2 parents: 54a0acf + b2cd1bf)
Authored by @meskyanichi
Showing 969 additions and 447 deletions across 45 changed files.
  1. +3 −1 Gemfile.lock
  2. +4 −3 README.md
  3. +8 −4 lib/backup.rb
  4. +1 −1  lib/backup/config.rb
  5. +23 −0 lib/backup/configuration/syncer/cloud.rb
  6. +30 −0 lib/backup/configuration/syncer/cloud_files.rb
  7. +5 −11 lib/backup/configuration/syncer/s3.rb
  8. +6 −0 lib/backup/dependency.rb
  9. +1 −1  lib/backup/notifier/twitter.rb
  10. +25 −0 lib/backup/syncer/base.rb
  11. +187 −0 lib/backup/syncer/cloud.rb
  12. +56 −0 lib/backup/syncer/cloud_files.rb
  13. +0 −26 lib/backup/syncer/rsync/base.rb
  14. +21 −102 lib/backup/syncer/s3.rb
  15. +1 −1  lib/backup/version.rb
  16. +2 −2 spec/cli/utility_spec.rb
  17. +44 −0 spec/configuration/syncer/cloud_files_spec.rb
  18. +0 −4 spec/configuration/syncer/s3_spec.rb
  19. +3 −3 spec/notifier/twitter_spec.rb
  20. +192 −0 spec/syncer/cloud_files_spec.rb
  21. +155 −191 spec/syncer/s3_spec.rb
  22. +20 −8 templates/cli/utility/archive
  23. +3 −3 templates/cli/utility/database/mongodb
  24. +4 −4 templates/cli/utility/database/mysql
  25. +4 −4 templates/cli/utility/database/postgresql
  26. +1 −1  templates/cli/utility/database/redis
  27. +2 −2 templates/cli/utility/encryptor/openssl
  28. +3 −3 templates/cli/utility/notifier/campfire
  29. +6 −6 templates/cli/utility/notifier/hipchat
  30. +7 −7 templates/cli/utility/notifier/mail
  31. +4 −4 templates/cli/utility/notifier/presently
  32. +2 −2 templates/cli/utility/notifier/prowl
  33. +4 −4 templates/cli/utility/notifier/twitter
  34. +22 −0 templates/cli/utility/storage/cloud_files
  35. +0 −12 templates/cli/utility/storage/cloudfiles
  36. +15 −10 templates/cli/utility/storage/dropbox
  37. +4 −4 templates/cli/utility/storage/ftp
  38. +1 −1  templates/cli/utility/storage/local
  39. +3 −3 templates/cli/utility/storage/ninefold
  40. +4 −4 templates/cli/utility/storage/rsync
  41. +6 −6 templates/cli/utility/storage/s3
  42. +4 −4 templates/cli/utility/storage/scp
  43. +4 −4 templates/cli/utility/storage/sftp
  44. +48 −0 templates/cli/utility/syncer/cloud_files
  45. +31 −1 templates/cli/utility/syncer/s3
4 Gemfile.lock
@@ -1,7 +1,7 @@
PATH
remote: .
specs:
- backup (3.0.21)
+ backup (3.0.22)
POpen4 (~> 0.1.4)
thor (~> 0.14.6)
@@ -71,6 +71,7 @@ GEM
net-ssh (2.1.4)
nokogiri (1.5.0)
open4 (1.3.0)
+ parallel (0.5.12)
polyglot (0.3.3)
prowler (1.3.1)
rack (1.4.0)
@@ -120,6 +121,7 @@ DEPENDENCIES
net-scp (~> 1.0.4)
net-sftp (~> 2.0.5)
net-ssh (~> 2.1.4)
+ parallel (~> 0.5.12)
prowler (>= 1.3.1)
rb-fsevent
rb-inotify
7 README.md
@@ -18,7 +18,7 @@ Drop me a message for any questions, suggestions, requests, bugs or submit them
Installation
------------
-To get the latest stable version (3.0.20)
+To get the latest stable version
gem install backup
@@ -94,7 +94,8 @@ Syncers
-------
- RSync (Push, Pull and Local)
-- Amazon Simple Storage Service (S3)
+- Amazon S3
+- Rackspace Cloud Files
[Syncer Wiki Page](https://github.com/meskyanichi/backup/wiki/Syncers)
@@ -238,7 +239,7 @@ First, it will dump the two Databases (MySQL and MongoDB). The MySQL dump will b
`sample_backup/databases/MySQL/my_sample_mysql_db.sql.gz`. The MongoDB dump will be dumped into
`sample_backup/databases/MongoDB/`, which will then be packaged into `sample_backup/databases/MongoDB-#####.tar.gz`
(`#####` will be a simple unique identifier, in case multiple dumps are performed.)
-Next, it will create two _tar_ Archives (user_avatars and logs). Each will be piped through the Gzip Compressor into
+Next, it will create two _tar_ Archives (user\_avatars and logs). Each will be piped through the Gzip Compressor into
`sample_backup/archives/` as `user_archives.tar.gz` and `logs.tar.gz`.
Finally, the `sample_backup` directory will be packaged into an uncompressed _tar_ archive, which will be piped through
the OpenSSL Encryptor to encrypt this final package into `YYYY-MM-DD-hh-mm-ss.sample_backup.tar.enc`. This final
12 lib/backup.rb
@@ -78,8 +78,10 @@ module Storage
##
# Autoload Backup syncer files
module Syncer
- autoload :Base, File.join(SYNCER_PATH, 'base')
- autoload :S3, File.join(SYNCER_PATH, 's3')
+ autoload :Base, File.join(SYNCER_PATH, 'base')
+ autoload :Cloud, File.join(SYNCER_PATH, 'cloud')
+ autoload :CloudFiles, File.join(SYNCER_PATH, 'cloud_files')
+ autoload :S3, File.join(SYNCER_PATH, 's3')
module RSync
autoload :Base, File.join(SYNCER_PATH, 'rsync', 'base')
autoload :Local, File.join(SYNCER_PATH, 'rsync', 'local')
@@ -174,8 +176,10 @@ module Storage
end
module Syncer
- autoload :Base, File.join(CONFIGURATION_PATH, 'syncer', 'base')
- autoload :S3, File.join(CONFIGURATION_PATH, 'syncer', 's3')
+ autoload :Base, File.join(CONFIGURATION_PATH, 'syncer', 'base')
+ autoload :Cloud, File.join(CONFIGURATION_PATH, 'syncer', 'cloud')
+ autoload :CloudFiles, File.join(CONFIGURATION_PATH, 'syncer', 'cloud_files')
+ autoload :S3, File.join(CONFIGURATION_PATH, 'syncer', 's3')
module RSync
autoload :Base, File.join(CONFIGURATION_PATH, 'syncer', 'rsync', 'base')
autoload :Local, File.join(CONFIGURATION_PATH, 'syncer', 'rsync', 'local')
2  lib/backup/config.rb
@@ -111,7 +111,7 @@ def add_dsl_constants!
# Encryptors
['OpenSSL', 'GPG'],
# Syncers
- ['S3', { 'RSync' => ['Push', 'Pull', 'Local'] }],
+ ['Rackspace', 'S3', { 'RSync' => ['Push', 'Pull', 'Local'] }],
# Notifiers
['Mail', 'Twitter', 'Campfire', 'Presently', 'Prowl', 'Hipchat']
]
23 lib/backup/configuration/syncer/cloud.rb
@@ -0,0 +1,23 @@
+# encoding: utf-8
+
+module Backup
+ module Configuration
+ module Syncer
+ class Cloud < Base
+ class << self
+ ##
+ # Amazon S3 bucket name and path to sync to
+ attr_accessor :bucket, :path
+
+ ##
+ # Directories to sync
+ attr_accessor :directories
+
+ ##
+ # Flag to enable mirroring
+ attr_accessor :mirror
+ end
+ end
+ end
+ end
+end
30 lib/backup/configuration/syncer/cloud_files.rb
@@ -0,0 +1,30 @@
+# encoding: utf-8
+
+module Backup
+ module Configuration
+ module Syncer
+ class CloudFiles < Cloud
+ class << self
+ ##
+ # Rackspace CloudFiles Credentials
+ attr_accessor :api_key, :username
+
+ ##
+ # Rackspace CloudFiles Container
+ attr_accessor :container
+
+ ##
+ # Rackspace AuthURL allows you to connect to a different Rackspace datacenter
+ # - https://auth.api.rackspacecloud.com (Default: US)
+ # - https://lon.auth.api.rackspacecloud.com (UK)
+ attr_accessor :auth_url
+
+ ##
+ # Improve performance and avoid data transfer costs by setting @servicenet to `true`
+ # This only works if Backup runs on a Rackspace server
+ attr_accessor :servicenet
+ end
+ end
+ end
+ end
+end
16 lib/backup/configuration/syncer/s3.rb
@@ -3,25 +3,19 @@
module Backup
module Configuration
module Syncer
- class S3 < Base
+ class S3 < Cloud
class << self
-
##
# Amazon Simple Storage Service (S3) Credentials
attr_accessor :access_key_id, :secret_access_key
##
- # Amazon S3 bucket name and path to sync to
- attr_accessor :bucket, :path
-
- ##
- # Flag to enable mirroring
- attr_accessor :mirror
+ # The S3 bucket to store files to
+ attr_accessor :bucket
##
- # Additional options for the s3sync cli
- attr_accessor :additional_options
-
+ # The AWS region of the specified S3 bucket
+ attr_accessor :region
end
end
end
6 lib/backup/dependency.rb
@@ -79,6 +79,12 @@ def self.all
:require => 'hipchat',
:version => '~> 0.4.1',
:for => 'Sending notifications to Hipchat'
+ },
+
+ 'parallel' => {
+ :require => 'parallel',
+ :version => '~> 0.5.12',
+ :for => 'Adding concurrency to Cloud-based syncers.'
}
}
end
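Reviewer note: a minimal sketch of the `parallel` gem API these cloud syncers rely on, assuming the gem is installed (the file list is hypothetical):

    require 'parallel'

    files = %w[a.txt b.txt c.txt] # hypothetical work items

    # Runs the block across 2 threads; pass :in_processes instead to fork workers.
    Parallel.each(files, :in_threads => 2) do |file|
      puts "syncing #{file}"
    end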
2  lib/backup/notifier/twitter.rb
@@ -49,7 +49,7 @@ def notify!(status)
when :warning then 'Warning'
when :failure then 'Failure'
end
- message = "[Backup::%s] #{@model.label} (#{@model.trigger})" % name
+ message = "[Backup::%s] #{@model.label} (#{@model.trigger}) (@ #{@model.time})" % name
send_message(message)
end
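With this change a notification reads along the lines of `[Backup::Success] test label (test_trigger) (@ ...)`, where the timestamp comes from `@model.time` (see the updated Twitter specs below).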
25 lib/backup/syncer/base.rb
@@ -6,6 +6,31 @@ class Base
include Backup::CLI::Helpers
include Backup::Configuration::Helpers
+ ##
+ # Directories to sync
+ attr_accessor :directories
+
+ ##
+ # Path to store the synced files/directories to
+ attr_accessor :path
+
+ ##
+ # Flag for mirroring the files/directories
+ attr_accessor :mirror
+
+ ##
+ # Syntactical sugar for the DSL for adding directories
+ def directories(&block)
+ return @directories unless block_given?
+ instance_eval(&block)
+ end
+
+ ##
+ # Adds a path to the @directories array
+ def add(path)
+ @directories << path
+ end
+
private
def syncer_name
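Since `directories(&block)` uses `instance_eval`, `add` is called on the syncer itself inside the block. A minimal sketch of the DSL behavior, assuming Backup and its dependencies are loadable (paths are illustrative):

    syncer = Backup::Syncer::S3.new do |s3|
      s3.directories do
        add "/var/www/uploads"   # instance_eval makes `add` available directly
        add "/var/log/app"
      end
    end

    syncer.directories #=> ["/var/www/uploads", "/var/log/app"]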
187 lib/backup/syncer/cloud.rb
@@ -0,0 +1,187 @@
+# encoding: utf-8
+
+##
+# Only load the Fog gem, along with the Parallel gem, when the Backup::Syncer::Cloud class is loaded
+Backup::Dependency.load('fog')
+Backup::Dependency.load('parallel')
+
+module Backup
+ module Syncer
+ class Cloud < Base
+
+ ##
+ # Create a Mutex to synchronize certain parts of the code
+ # in order to prevent race conditions or broken STDOUT.
+ MUTEX = Mutex.new
+
+ ##
+ # Concurrency setting - defaults to false, but can be set to:
+ # - :threads
+ # - :processes
+ attr_accessor :concurrency_type
+
+ ##
+ # Concurrency level - the number of threads or processes to use. Defaults to 2.
+ attr_accessor :concurrency_level
+
+ ##
+ # Instantiates a new Cloud Syncer object and sets the default
+ # configuration specified in the corresponding Backup::Configuration::Syncer subclass. Then
+ # it sets the object defaults if particular properties weren't set.
+ # Finally it'll evaluate the user's configuration file and overwrite
+ # anything that's been defined.
+ def initialize(&block)
+ load_defaults!
+
+ @path ||= 'backups'
+ @directories ||= Array.new
+ @mirror ||= false
+ @concurrency_type = false
+ @concurrency_level = 2
+
+ instance_eval(&block) if block_given?
+
+ @path = path.sub(/^\//, '')
+ end
+
+ ##
+ # Performs the Sync operation
+ def perform!
+ Logger.message("#{ self.class } started the syncing process:")
+
+ directories.each do |directory|
+ SyncContext.new(directory, repository_object, path).
+ sync! mirror, concurrency_type, concurrency_level
+ end
+ end
+
+ private
+
+ class SyncContext
+ attr_reader :directory, :bucket, :path
+
+ ##
+ # Creates a new SyncContext object which handles a single directory
+ # from the Syncer::Base @directories array.
+ def initialize(directory, bucket, path)
+ @directory, @bucket, @path = directory, bucket, path
+ end
+
+ ##
+ # Performs the sync operation using the provided techniques (mirroring/concurrency).
+ def sync!(mirror = false, concurrency_type = false, concurrency_level = 2)
+ block = Proc.new { |relative_path| sync_file relative_path, mirror }
+
+ case concurrency_type
+ when FalseClass
+ all_file_names.each &block
+ when :threads
+ Parallel.each all_file_names, :in_threads => concurrency_level, &block
+ when :processes
+ Parallel.each all_file_names, :in_processes => concurrency_level, &block
+ else
+ raise "Unknown concurrency_type setting: #{concurrency_type.inspect}"
+ end
+ end
+
+ private
+
+ ##
+ # Gathers all the remote and local file names and merges them together, removing
+ # duplicate keys if any, and sorts them in alphabetical order.
+ def all_file_names
+ @all_file_names ||= (local_files.keys | remote_files.keys).sort
+ end
+
+ ##
+ # Returns a Hash of local files (the keys are the filesystem paths,
+ # the values are the LocalFile objects for that given file)
+ def local_files
+ @local_files ||= begin
+ local_hashes.split("\n").collect { |line|
+ LocalFile.new directory, line
+ }.inject({}) { |hash, file|
+ hash[file.relative_path] = file
+ hash
+ }
+ end
+ end
+
+ ##
+ # Returns a String of file paths and their md5 hashes.
+ def local_hashes
+ MUTEX.synchronize { Logger.message("\s\sGenerating checksums for #{ directory }") }
+ `find #{directory} -print0 | xargs -0 openssl md5 2> /dev/null`
+ end
+
+ ##
+ # Returns a Hash of remote files (the keys are the remote paths,
+ # the values are the Fog file objects for that given file)
+ def remote_files
+ @remote_files ||= bucket.files.to_a.select { |file|
+ file.key[%r{^#{remote_base}/}]
+ }.inject({}) { |hash, file|
+ key = file.key.gsub(/^#{remote_base}\//,
+ "#{directory.split('/').last}/")
+ hash[key] = file
+ hash
+ }
+ end
+
+ ##
+ # Creates and returns a String that represents the base remote storage path
+ def remote_base
+ @remote_base ||= [path, directory.split('/').last].select { |part|
+ part && part.strip.length > 0
+ }.join('/')
+ end
+
+ ##
+ # Performs a sync operation on a file. When mirroring is enabled
+ # and a local file has been removed since the last sync, it will also
+ # remove it from the remote location. It will not upload files that
+ # have not changed since the last sync. Checks are done using an md5 hash.
+ # If a file has changed, or has been newly added, the file will be transferred/overwritten.
+ def sync_file(relative_path, mirror)
+ local_file = local_files[relative_path]
+ remote_file = remote_files[relative_path]
+
+ if local_file && File.exist?(local_file.path)
+ unless remote_file && remote_file.etag == local_file.md5
+ MUTEX.synchronize { Logger.message("\s\s[transferring] #{relative_path}") }
+ bucket.files.create(
+ :key => "#{path}/#{relative_path}".gsub(/^\//, ''),
+ :body => File.open(local_file.path)
+ )
+ else
+ MUTEX.synchronize { Logger.message("\s\s[skipping] #{relative_path}") }
+ end
+ elsif remote_file && mirror
+ MUTEX.synchronize { Logger.message("\s\s[removing] #{relative_path}") }
+ remote_file.destroy
+ end
+ end
+ end
+
+ class LocalFile
+ attr_reader :directory, :path, :md5
+
+ ##
+ # Creates a new LocalFile object using the given directory and a line
+ # of md5 checksum output. This object figures out the path, relative_path and md5 hash
+ # for the file.
+ def initialize(directory, line)
+ @directory = directory
+ @path, @md5 = *line.chomp.match(/^MD5\(([^\)]+)\)= (\w+)$/).captures
+ end
+
+ ##
+ # Returns the relative path to the file.
+ def relative_path
+ @relative_path ||= path.gsub %r{^#{directory}},
+ directory.split('/').last
+ end
+ end
+ end
+ end
+end
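To illustrate the checksum plumbing: `local_hashes` shells out to `openssl md5`, whose output lines look like the one below, and `LocalFile` parses them with the regex from this diff (the path and digest here are made up):

    line = "MD5(/var/www/index.html)= d41d8cd98f00b204e9800998ecf8427e"

    path, md5 = *line.chomp.match(/^MD5\(([^\)]+)\)= (\w+)$/).captures
    path #=> "/var/www/index.html"
    md5  #=> "d41d8cd98f00b204e9800998ecf8427e"

    # For directory "/var/www", #relative_path swaps the directory prefix
    # for its basename: "/var/www/index.html" becomes "www/index.html"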
56 lib/backup/syncer/cloud_files.rb
@@ -0,0 +1,56 @@
+# encoding: utf-8
+
+module Backup
+ module Syncer
+ class CloudFiles < Cloud
+
+ ##
+ # Rackspace CloudFiles Credentials
+ attr_accessor :api_key, :username
+
+ ##
+ # Rackspace CloudFiles Container
+ attr_accessor :container
+
+ ##
+ # Rackspace AuthURL allows you to connect to a different Rackspace datacenter
+ # - https://auth.api.rackspacecloud.com (Default: US)
+ # - https://lon.auth.api.rackspacecloud.com (UK)
+ attr_accessor :auth_url
+
+ ##
+ # Improve performance and avoid data transfer costs by setting @servicenet to `true`
+ # This only works if Backup runs on a Rackspace server
+ attr_accessor :servicenet
+
+ private
+
+ ##
+ # Establishes a new Fog storage object for Cloud Files.
+ def connection
+ @connection ||= Fog::Storage.new(
+ :provider => provider,
+ :rackspace_username => username,
+ :rackspace_api_key => api_key,
+ :rackspace_auth_url => auth_url,
+ :rackspace_servicenet => servicenet
+ )
+ end
+
+ ##
+ # Creates a new @repository_object (container). Fetches it from Cloud Files
+ # if it already exists, otherwise it will create it first and use that instead.
+ def repository_object
+ @repository_object ||= connection.directories.get(container) ||
+ connection.directories.create(:key => container)
+ end
+
+ ##
+ # This is the provider that Fog uses for Cloud Files
+ def provider
+ "Rackspace"
+ end
+
+ end
+ end
+end
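A sketch of the get-or-create pattern `repository_object` implements, using Fog directly (credentials are placeholders):

    require 'fog'

    storage = Fog::Storage.new(
      :provider             => 'Rackspace',
      :rackspace_username   => 'my_username', # placeholder credentials
      :rackspace_api_key    => 'my_api_key',
      :rackspace_auth_url   => 'https://auth.api.rackspacecloud.com',
      :rackspace_servicenet => false
    )

    # directories.get returns nil when the container is missing,
    # so we fall through to creating it.
    container = storage.directories.get('my_container') ||
                storage.directories.create(:key => 'my_container')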
26 lib/backup/syncer/rsync/base.rb
@@ -4,19 +4,6 @@ module Backup
module Syncer
module RSync
class Base < Syncer::Base
-
- ##
- # Path to store the synced files/directories to
- attr_accessor :path
-
- ##
- # Directories to sync
- attr_writer :directories
-
- ##
- # Flag for mirroring the files/directories
- attr_accessor :mirror
-
##
# Additional options for the rsync cli
attr_accessor :additional_options
@@ -33,19 +20,6 @@ def initialize
@additional_options ||= Array.new
end
- ##
- # Syntactical suger for the DSL for adding directories
- def directories(&block)
- return @directories unless block_given?
- instance_eval(&block)
- end
-
- ##
- # Adds a path to the @directories array
- def add(path)
- @directories << path
- end
-
private
##
123 lib/backup/syncer/s3.rb
@@ -2,127 +2,46 @@
module Backup
module Syncer
- class S3 < Base
+ class S3 < Cloud
##
# Amazon Simple Storage Service (S3) Credentials
attr_accessor :access_key_id, :secret_access_key
##
- # Amazon S3 bucket name and path to sync to
- attr_accessor :bucket, :path
+ # The S3 bucket to store files to
+ attr_accessor :bucket
##
- # Directories to sync
- attr_writer :directories
-
- ##
- # Flag to enable mirroring
- attr_accessor :mirror
-
- ##
- # Additional options for the s3sync cli
- attr_accessor :additional_options
-
- ##
- # Instantiates a new S3 Syncer object and sets the default configuration
- # specified in the Backup::Configuration::Syncer::S3.
- # Then it sets the object defaults if particular properties weren't set.
- # Finally it'll evaluate the users configuration file and overwrite
- # anything that's been defined
- def initialize(&block)
- load_defaults!
-
- @path ||= 'backups'
- @directories = Array.new
- @mirror ||= false
- @additional_options ||= []
-
- instance_eval(&block) if block_given?
- end
-
- ##
- # Sets the Amazon S3 credentials for S3Sync, performs the S3Sync
- # operation, then unsets the credentials (back to nil values)
- def perform!
- set_environment_variables!
-
- @directories.each do |directory|
- Logger.message("#{ syncer_name } started syncing '#{ directory }'.")
- Logger.silent(
- run("#{ utility(:s3sync) } #{ options } " +
- "'#{ File.expand_path(directory) }' '#{ bucket }:#{ dest_path }'")
- )
- end
-
- unset_environment_variables!
- end
-
- ##
- # Syntactical suger for the DSL for adding directories
- def directories(&block)
- return @directories unless block_given?
- instance_eval(&block)
- end
-
- ##
- # Adds a path to the @directories array
- def add(path)
- @directories << path
- end
+ # The AWS region of the specified S3 bucket
+ attr_accessor :region
private
##
- # Return @path with preceeding '/' slash removed
- def dest_path
- @dest_path ||= @path.sub(/^\//, '')
- end
-
- ##
- # Returns all the specified S3Sync options,
- # concatenated, ready for the CLI
- def options
- ([verbose_option, recursive_option, mirror_option] +
- additional_options).compact.join("\s")
- end
-
- ##
- # Returns S3Sync syntax for enabling mirroring
- def mirror_option
- '--delete' if @mirror
- end
-
- ##
- # Returns S3Sync syntax for syncing recursively
- def recursive_option
- '--recursive'
+ # Establishes a new Fog storage object for S3.
+ def connection
+ @connection ||= Fog::Storage.new(
+ :provider => provider,
+ :aws_access_key_id => access_key_id,
+ :aws_secret_access_key => secret_access_key,
+ :region => region
+ )
end
##
- # Returns S3Sync syntax for making output verbose
- def verbose_option
- '--verbose'
+ # Creates a new @repository_object (bucket). Fetches it from S3
+ # if it already exists, otherwise it will create it first and use that instead.
+ def repository_object
+ @repository_object ||= connection.directories.get(bucket) ||
+ connection.directories.create(:key => bucket, :location => region)
end
##
- # In order for S3Sync to know what credentials to use, we have to set the
- # AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment variables, these
- # evironment variables will be used by S3Sync
- def set_environment_variables!
- ENV['AWS_ACCESS_KEY_ID'] = access_key_id
- ENV['AWS_SECRET_ACCESS_KEY'] = secret_access_key
- ENV['AWS_CALLING_FORMAT'] = 'SUBDOMAIN'
+ # This is the provider that Fog uses for Amazon S3
+ def provider
+ "AWS"
end
-
- ##
- # Sets the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY back to nil
- def unset_environment_variables!
- ENV['AWS_ACCESS_KEY_ID'] = nil
- ENV['AWS_SECRET_ACCESS_KEY'] = nil
- ENV['AWS_CALLING_FORMAT'] = nil
- end
-
end
end
end
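The syncer no longer shells out to `s3sync` or mutates `ENV`; everything goes through Fog. A minimal sketch of the calls involved (keys, bucket and region are placeholders):

    require 'fog'

    storage = Fog::Storage.new(
      :provider              => 'AWS',
      :aws_access_key_id     => 'my_access_key_id',     # placeholder
      :aws_secret_access_key => 'my_secret_access_key', # placeholder
      :region                => 'us-east-1'
    )

    # Look the bucket up first; :location pins a newly created bucket's region.
    bucket = storage.directories.get('my-bucket') ||
             storage.directories.create(:key => 'my-bucket', :location => 'us-east-1')

    # Uploading mirrors Cloud#sync_file: the remote key is path + relative path.
    bucket.files.create(
      :key  => 'backups/www/index.html',
      :body => File.open('/var/www/index.html')
    )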
2  lib/backup/version.rb
@@ -13,7 +13,7 @@ class Version
# Defines the minor version
# PATCH:
# Defines the patch version
- MAJOR, MINOR, PATCH = 3, 0, 21
+ MAJOR, MINOR, PATCH = 3, 0, 22
##
# Returns the major version ( big release based off of multiple minor releases )
4 spec/cli/utility_spec.rb
@@ -187,8 +187,8 @@
--trigger=TRIGGER
[--config-path=CONFIG_PATH] # Path to your Backup configuration directory
[--databases=DATABASES] # (mongodb, mysql, postgresql, redis, riak)
- [--storages=STORAGES] # (cloudfiles, dropbox, ftp, local, ninefold, rsync, s3, scp, sftp)
- [--syncers=SYNCERS] # (rsync_local, rsync_pull, rsync_push, s3)
+ [--storages=STORAGES] # (cloud_files, dropbox, ftp, local, ninefold, rsync, s3, scp, sftp)
+ [--syncers=SYNCERS] # (cloud_files, rsync_local, rsync_pull, rsync_push, s3)
[--encryptors=ENCRYPTORS] # (gpg, openssl)
[--compressors=COMPRESSORS] # (bzip2, gzip, lzma, pbzip2)
[--notifiers=NOTIFIERS] # (campfire, hipchat, mail, presently, prowl, twitter)
44 spec/configuration/syncer/cloud_files_spec.rb
@@ -0,0 +1,44 @@
+# encoding: utf-8
+
+require File.expand_path('../../../spec_helper.rb', __FILE__)
+
+describe Backup::Configuration::Syncer::CloudFiles do
+ before do
+ Backup::Configuration::Syncer::CloudFiles.defaults do |cf|
+ cf.username = 'my-username'
+ cf.api_key = 'my-api-key'
+ cf.container = 'my-container'
+ cf.auth_url = 'my-auth-url'
+ cf.servicenet = true
+ cf.path = '/backups/'
+ cf.mirror = true
+ end
+ end
+ after { Backup::Configuration::Syncer::CloudFiles.clear_defaults! }
+
+ it 'should set the default cloud files configuration' do
+ cf = Backup::Configuration::Syncer::CloudFiles
+ cf.username.should == 'my-username'
+ cf.api_key.should == 'my-api-key'
+ cf.container.should == 'my-container'
+ cf.auth_url.should == 'my-auth-url'
+ cf.servicenet.should == true
+ cf.path.should == '/backups/'
+ cf.mirror.should == true
+ end
+
+ describe '#clear_defaults!' do
+ it 'should clear all the defaults, resetting them to nil' do
+ Backup::Configuration::Syncer::CloudFiles.clear_defaults!
+
+ cf = Backup::Configuration::Syncer::CloudFiles
+ cf.username.should == nil
+ cf.api_key.should == nil
+ cf.container.should == nil
+ cf.auth_url.should == nil
+ cf.servicenet.should == nil
+ cf.path.should == nil
+ cf.mirror.should == nil
+ end
+ end
+end
4 spec/configuration/syncer/s3_spec.rb
@@ -9,9 +9,7 @@
s3.secret_access_key = 'my_secret_access_key'
s3.bucket = 'my-bucket'
s3.path = '/backups/'
- #s3.directories = 'cannot_have_a_default_value'
s3.mirror = true
- s3.additional_options = ['--exclude="*.rb"']
end
end
after { Backup::Configuration::Syncer::S3.clear_defaults! }
@@ -23,7 +21,6 @@
s3.bucket.should == 'my-bucket'
s3.path.should == '/backups/'
s3.mirror.should == true
- s3.additional_options.should == ['--exclude="*.rb"']
end
describe '#clear_defaults!' do
@@ -36,7 +33,6 @@
s3.bucket.should == nil
s3.path.should == nil
s3.mirror.should == nil
- s3.additional_options.should == nil
end
end
end
6 spec/notifier/twitter_spec.rb
@@ -88,7 +88,7 @@
context 'when status is :success' do
it 'should send Success message' do
notifier.expects(:send_message).with(
- '[Backup::Success] test label (test_trigger)'
+ "[Backup::Success] test label (test_trigger) (@ #{notifier.instance_variable_get("@model").time})"
)
notifier.send(:notify!, :success)
end
@@ -97,7 +97,7 @@
context 'when status is :warning' do
it 'should send Warning message' do
notifier.expects(:send_message).with(
- '[Backup::Warning] test label (test_trigger)'
+ "[Backup::Warning] test label (test_trigger) (@ #{notifier.instance_variable_get("@model").time})"
)
notifier.send(:notify!, :warning)
end
@@ -106,7 +106,7 @@
context 'when status is :failure' do
it 'should send Failure message' do
notifier.expects(:send_message).with(
- '[Backup::Failure] test label (test_trigger)'
+ "[Backup::Failure] test label (test_trigger) (@ #{notifier.instance_variable_get("@model").time})"
)
notifier.send(:notify!, :failure)
end
192 spec/syncer/cloud_files_spec.rb
@@ -0,0 +1,192 @@
+# encoding: utf-8
+require File.expand_path('../../spec_helper.rb', __FILE__)
+
+class Parallel; end
+
+describe Backup::Syncer::CloudFiles do
+ describe '#perform!' do
+ let(:syncer) { Backup::Syncer::CloudFiles.new }
+ let(:connection) { stub('connection',
+ :directories => stub('directories', :get => container)) }
+ let(:container) { stub('container', :files => files) }
+ let(:files) { [] }
+ let(:content) { stub('content') }
+
+ before :each do
+ Fog::Storage.stubs(:new).returns connection
+ File.stubs(:open).returns content
+ File.stubs(:exist?).returns true
+ files.stubs(:create).returns true
+
+ syncer.directories << 'tmp'
+ syncer.path = 'storage'
+ Backup::Syncer::CloudFiles::SyncContext.any_instance.
+ stubs(:`).returns 'MD5(tmp/foo)= 123abcdef'
+ end
+
+ it "respects the concurrency_type setting with threads" do
+ syncer.concurrency_type = :threads
+
+ Parallel.expects(:each).with(anything, {:in_threads => 2}, anything)
+
+ syncer.perform!
+ end
+
+ it "respects the parallel thread count" do
+ syncer.concurrency_type = :threads
+ syncer.concurrency_level = 10
+
+ Parallel.expects(:each).with(anything, {:in_threads => 10}, anything)
+
+ syncer.perform!
+ end
+
+ it "respects the concurrency_type setting with processors" do
+ syncer.concurrency_type = :processes
+
+ Parallel.expects(:each).with(anything, {:in_processes => 2}, anything)
+
+ syncer.perform!
+ end
+
+ it "respects the parallel thread count" do
+ syncer.concurrency_type = :processes
+ syncer.concurrency_level = 10
+
+ Parallel.expects(:each).with(anything, {:in_processes => 10}, anything)
+
+ syncer.perform!
+ end
+
+ context 'file exists locally' do
+ it "uploads a file if it does not exist remotely" do
+ files.expects(:create).with(:key => 'storage/tmp/foo', :body => content)
+
+ syncer.perform!
+ end
+
+ it "uploads a file if it exists remotely with a different MD5" do
+ files << stub('file', :key => 'storage/tmp/foo', :etag => 'abcdef123')
+
+ files.expects(:create).with(:key => 'storage/tmp/foo', :body => content)
+
+ syncer.perform!
+ end
+
+ it "does nothing if the file exists remotely with the same MD5" do
+ files << stub('file', :key => 'storage/tmp/foo', :etag => '123abcdef')
+
+ files.expects(:create).never
+
+ syncer.perform!
+ end
+
+ it "skips the file if it no longer exists locally" do
+ File.stubs(:exist?).returns false
+
+ files.expects(:create).never
+
+ syncer.perform!
+ end
+
+ it "respects the given path" do
+ syncer.path = 'box'
+
+ files.expects(:create).with(:key => 'box/tmp/foo', :body => content)
+
+ syncer.perform!
+ end
+
+ it "uploads the content of the local file" do
+ File.expects(:open).with('tmp/foo').returns content
+
+ syncer.perform!
+ end
+
+ it "creates the connection with the provided credentials" do
+ syncer.api_key = 'my-key'
+ syncer.username = 'my-name'
+ syncer.auth_url = 'my-auth'
+ syncer.servicenet = 'my-servicenet'
+
+ Fog::Storage.expects(:new).with(
+ :provider => 'Rackspace',
+ :rackspace_api_key => 'my-key',
+ :rackspace_username => 'my-name',
+ :rackspace_auth_url => 'my-auth',
+ :rackspace_servicenet => 'my-servicenet'
+ ).returns connection
+
+ syncer.perform!
+ end
+
+ it "uses the container with the given name" do
+ syncer.container = 'leaky'
+
+ connection.directories.expects(:get).with('leaky').returns(container)
+
+ syncer.perform!
+ end
+
+ it "creates the container if one does not exist" do
+ syncer.container = 'leaky'
+ connection.directories.stubs(:get).returns nil
+
+ connection.directories.expects(:create).
+ with(:key => 'leaky').returns(container)
+
+ syncer.perform!
+ end
+
+ it "iterates over each directory" do
+ syncer.directories << 'files'
+
+ Backup::Syncer::CloudFiles::SyncContext.any_instance.expects(:`).
+ with('find tmp -print0 | xargs -0 openssl md5 2> /dev/null').
+ returns 'MD5(tmp/foo)= 123abcdef'
+ Backup::Syncer::CloudFiles::SyncContext.any_instance.expects(:`).
+ with('find files -print0 | xargs -0 openssl md5 2> /dev/null').
+ returns 'MD5(tmp/foo)= 123abcdef'
+
+ syncer.perform!
+ end
+ end
+
+ context 'file does not exist locally' do
+ let(:file) { stub('file', :key => 'storage/tmp/foo',
+ :etag => '123abcdef') }
+
+ before :each do
+ Backup::Syncer::CloudFiles::SyncContext.any_instance.
+ stubs(:`).returns ''
+ files << file
+ File.stubs(:exist?).returns false
+ end
+
+ it "removes the remote file when mirroring is turned on" do
+ syncer.mirror = true
+
+ file.expects(:destroy).once
+
+ syncer.perform!
+ end
+
+ it "leaves the remote file when mirroring is turned off" do
+ syncer.mirror = false
+
+ file.expects(:destroy).never
+
+ syncer.perform!
+ end
+
+ it "does not remove files not under one of the specified directories" do
+ file.stubs(:key).returns 'unsynced/tmp/foo'
+ syncer.mirror = true
+
+ file.expects(:destroy).never
+
+ syncer.perform!
+ end
+ end
+ end
+end
346 spec/syncer/s3_spec.rb
@@ -1,228 +1,192 @@
# encoding: utf-8
-
require File.expand_path('../../spec_helper.rb', __FILE__)
+class Parallel; end
+
describe Backup::Syncer::S3 do
- let(:syncer) do
- Backup::Syncer::S3.new do |s3|
- s3.access_key_id = 'my_access_key_id'
- s3.secret_access_key = 'my_secret_access_key'
- s3.bucket = 'my-bucket'
- s3.path = "/my_backups"
-
- s3.directories do |directory|
- directory.add "/some/directory"
- directory.add "~/home/directory"
- end
+ describe '#perform!' do
+ let(:syncer) { Backup::Syncer::S3.new }
+ let(:connection) { stub('connection',
+ :directories => stub('directories', :get => bucket)) }
+ let(:bucket) { stub('bucket', :files => files) }
+ let(:files) { [] }
+ let(:content) { stub('content') }
+
+ before :each do
+ Fog::Storage.stubs(:new).returns connection
+ File.stubs(:open).returns content
+ File.stubs(:exist?).returns true
+ files.stubs(:create).returns true
+
+ syncer.directories << 'tmp'
+ syncer.path = 'storage'
+
+ Backup::Syncer::S3::SyncContext.any_instance.
+ stubs(:`).returns 'MD5(tmp/foo)= 123abcdef'
+ end
- s3.mirror = true
- s3.additional_options = ['--opt-a', '--opt-b']
+ it "respects the concurrency_type setting with threads" do
+ syncer.concurrency_type = :threads
+
+ Parallel.expects(:each).with(anything, {:in_threads => 2}, anything)
+
+ syncer.perform!
end
- end
- describe '#initialize' do
+ it "respects the parallel thread count" do
+ syncer.concurrency_type = :threads
+ syncer.concurrency_level = 10
- it 'should have defined the configuration properly' do
- syncer.access_key_id.should == 'my_access_key_id'
- syncer.secret_access_key.should == 'my_secret_access_key'
- syncer.bucket.should == 'my-bucket'
- syncer.path.should == '/my_backups'
- syncer.directories.should == ["/some/directory", "~/home/directory"]
- syncer.mirror.should == true
- syncer.additional_options.should == ['--opt-a', '--opt-b']
+ Parallel.expects(:each).with(anything, {:in_threads => 10}, anything)
+
+ syncer.perform!
end
- context 'when options are not set' do
- it 'should use default values' do
- syncer = Backup::Syncer::S3.new
- syncer.access_key_id.should == nil
- syncer.secret_access_key.should == nil
- syncer.bucket.should == nil
- syncer.path.should == 'backups'
- syncer.directories.should == []
- syncer.mirror.should == false
- syncer.additional_options.should == []
- end
+ it "respects the concurrency_type setting with processors" do
+ syncer.concurrency_type = :processes
+
+ Parallel.expects(:each).with(anything, {:in_processes => 2}, anything)
+
+ syncer.perform!
end
- context 'when setting configuration defaults' do
- after { Backup::Configuration::Syncer::S3.clear_defaults! }
-
- it 'should use the configured defaults' do
- Backup::Configuration::Syncer::S3.defaults do |s3|
- s3.access_key_id = 'some_access_key_id'
- s3.secret_access_key = 'some_secret_access_key'
- s3.bucket = 'some_bucket'
- s3.path = 'some_path'
- #s3.directories = 'cannot_have_a_default_value'
- s3.mirror = 'some_mirror'
- s3.additional_options = 'some_additional_options'
- end
- syncer = Backup::Syncer::S3.new
- syncer.access_key_id.should == 'some_access_key_id'
- syncer.secret_access_key.should == 'some_secret_access_key'
- syncer.bucket.should == 'some_bucket'
- syncer.path.should == 'some_path'
- syncer.directories.should == []
- syncer.mirror.should == 'some_mirror'
- syncer.additional_options.should == 'some_additional_options'
+ it "respects the parallel thread count" do
+ syncer.concurrency_type = :processes
+ syncer.concurrency_level = 10
+
+ Parallel.expects(:each).with(anything, {:in_processes => 10}, anything)
+
+ syncer.perform!
+ end
+
+ context 'file exists locally' do
+ it "uploads a file if it does not exist remotely" do
+ files.expects(:create).with(:key => 'storage/tmp/foo', :body => content)
+
+ syncer.perform!
end
- it 'should override the configured defaults' do
- Backup::Configuration::Syncer::S3.defaults do |s3|
- s3.access_key_id = 'old_access_key_id'
- s3.secret_access_key = 'old_secret_access_key'
- s3.bucket = 'old_bucket'
- s3.path = 'old_path'
- #s3.directories = 'cannot_have_a_default_value'
- s3.mirror = 'old_mirror'
- s3.additional_options = 'old_additional_options'
- end
- syncer = Backup::Syncer::S3.new do |s3|
- s3.access_key_id = 'new_access_key_id'
- s3.secret_access_key = 'new_secret_access_key'
- s3.bucket = 'new_bucket'
- s3.path = 'new_path'
- s3.directories = 'new_directories'
- s3.mirror = 'new_mirror'
- s3.additional_options = 'new_additional_options'
- end
-
- syncer.access_key_id.should == 'new_access_key_id'
- syncer.secret_access_key.should == 'new_secret_access_key'
- syncer.bucket.should == 'new_bucket'
- syncer.path.should == 'new_path'
- syncer.directories.should == 'new_directories'
- syncer.mirror.should == 'new_mirror'
- syncer.additional_options.should == 'new_additional_options'
+ it "uploads a file if it exists remotely with a different MD5" do
+ files << stub('file', :key => 'storage/tmp/foo', :etag => 'abcdef123')
+
+ files.expects(:create).with(:key => 'storage/tmp/foo', :body => content)
+
+ syncer.perform!
end
- end # context 'when setting configuration defaults'
- end # describe '#initialize'
- describe '#perform!' do
- let(:s) { sequence '' }
+ it "does nothing if the file exists remotely with the same MD5" do
+ files << stub('file', :key => 'storage/tmp/foo', :etag => '123abcdef')
- before do
- syncer.expects(:utility).twice.with(:s3sync).returns('s3sync')
- syncer.expects(:options).twice.returns('options_output')
- end
+ files.expects(:create).never
- it 'should sync two directories' do
- syncer.expects(:set_environment_variables!).in_sequence(s)
-
- # first directory
- Backup::Logger.expects(:message).in_sequence(s).with(
- "Syncer::S3 started syncing '/some/directory'."
- )
- syncer.expects(:run).in_sequence(s).with(
- "s3sync options_output '/some/directory' 'my-bucket:my_backups'"
- ).returns('messages from stdout')
- Backup::Logger.expects(:silent).in_sequence(s).with('messages from stdout')
-
- # second directory
- Backup::Logger.expects(:message).in_sequence(s).with(
- "Syncer::S3 started syncing '~/home/directory'."
- )
- syncer.expects(:run).in_sequence(s).with(
- "s3sync options_output '#{ File.expand_path('~/home/directory') }' " +
- "'my-bucket:my_backups'"
- ).returns('messages from stdout')
- Backup::Logger.expects(:silent).in_sequence(s).with('messages from stdout')
-
- syncer.expects(:unset_environment_variables!).in_sequence(s)
+ syncer.perform!
+ end
- syncer.perform!
- end
- end # describe '#perform!'
+ it "skips the file if it no longer exists locally" do
+ File.stubs(:exist?).returns false
+
+ files.expects(:create).never
- describe '#directories' do
- context 'when no block is given' do
- it 'should return @directories' do
- syncer.directories.should ==
- ['/some/directory', '~/home/directory']
+ syncer.perform!
end
- end
- context 'when a block is given' do
- it 'should evalute the block, allowing #add to add directories' do
- syncer.directories do
- add '/new/path'
- add '~/new/home/path'
- end
- syncer.directories.should == [
- '/some/directory',
- '~/home/directory',
- '/new/path',
- '~/new/home/path'
- ]
+ it "respects the given path" do
+ syncer.path = 'box'
+
+ files.expects(:create).with(:key => 'box/tmp/foo', :body => content)
+
+ syncer.perform!
end
- end
- end # describe '#directories'
- describe '#add' do
- it 'should add the given path to @directories' do
- syncer.add '/my/path'
- syncer.directories.should ==
- ['/some/directory', '~/home/directory', '/my/path']
- end
- end
+ it "uploads the content of the local file" do
+ File.expects(:open).with('tmp/foo').returns content
- describe '#dest_path' do
- it 'should remove any preceeding "/" from @path' do
- syncer.send(:dest_path).should == 'my_backups'
- end
+ syncer.perform!
+ end
- it 'should set @dest_path' do
- syncer.send(:dest_path)
- syncer.instance_variable_get(:@dest_path).should == 'my_backups'
- end
+ it "creates the connection with the provided credentials" do
+ syncer.access_key_id = 'my-access'
+ syncer.secret_access_key = 'my-secret'
+ syncer.region = 'somewhere'
- it 'should return @dest_path if already set' do
- syncer.instance_variable_set(:@dest_path, 'foo')
- syncer.send(:dest_path).should == 'foo'
- end
- end
+ Fog::Storage.expects(:new).with(
+ :provider => 'AWS',
+ :aws_access_key_id => 'my-access',
+ :aws_secret_access_key => 'my-secret',
+ :region => 'somewhere'
+ ).returns connection
- describe '#options' do
- context 'when @mirror is true' do
- it 'should return the options with mirroring enabled' do
- syncer.send(:options).should ==
- '--verbose --recursive --delete --opt-a --opt-b'
+ syncer.perform!
end
- end
- context 'when @mirror is false' do
- before { syncer.mirror = false }
- it 'should return the options without mirroring enabled' do
- syncer.send(:options).should ==
- '--verbose --recursive --opt-a --opt-b'
+ it "uses the bucket with the given name" do
+ syncer.bucket = 'leaky'
+
+ connection.directories.expects(:get).with('leaky').returns(bucket)
+
+ syncer.perform!
end
- end
- context 'with no additional options' do
- before { syncer.additional_options = [] }
- it 'should return the options without additional options' do
- syncer.send(:options).should ==
- '--verbose --recursive --delete'
+ it "creates the bucket if one does not exist" do
+ syncer.bucket = 'leaky'
+ syncer.region = 'elsewhere'
+ connection.directories.stubs(:get).returns nil
+
+ connection.directories.expects(:create).
+ with(:key => 'leaky', :location => 'elsewhere').returns(bucket)
+
+ syncer.perform!
+ end
+
+ it "iterates over each directory" do
+ syncer.directories << 'files'
+
+ Backup::Syncer::S3::SyncContext.any_instance.expects(:`).
+ with('find tmp -print0 | xargs -0 openssl md5 2> /dev/null').
+ returns 'MD5(tmp/foo)= 123abcdef'
+ Backup::Syncer::S3::SyncContext.any_instance.expects(:`).
+ with('find files -print0 | xargs -0 openssl md5 2> /dev/null').
+ returns 'MD5(tmp/foo)= 123abcdef'
+
+ syncer.perform!
end
end
- end # describe '#options'
-
- describe 'changing environment variables' do
- before { @env = ENV }
- after { ENV.replace(@env) }
-
- it 'should set and unset environment variables' do
- syncer.send(:set_environment_variables!)
- ENV['AWS_ACCESS_KEY_ID'].should == 'my_access_key_id'
- ENV['AWS_SECRET_ACCESS_KEY'].should == 'my_secret_access_key'
- ENV['AWS_CALLING_FORMAT'].should == 'SUBDOMAIN'
-
- syncer.send(:unset_environment_variables!)
- ENV['AWS_ACCESS_KEY_ID'].should == nil
- ENV['AWS_SECRET_ACCESS_KEY'].should == nil
- ENV['AWS_CALLING_FORMAT'].should == nil
+
+ context 'file does not exist locally' do
+ let(:file) { stub('file', :key => 'storage/tmp/foo',
+ :etag => '123abcdef') }
+
+ before :each do
+ Backup::Syncer::S3::SyncContext.any_instance.
+ stubs(:`).returns ''
+ files << file
+ File.stubs(:exist?).returns false
+ end
+
+ it "removes the remote file when mirroring is turned on" do
+ syncer.mirror = true
+
+ file.expects(:destroy).once
+
+ syncer.perform!
+ end
+
+ it "leaves the remote file when mirroring is turned off" do
+ syncer.mirror = false
+
+ file.expects(:destroy).never
+
+ syncer.perform!
+ end
+
+ it "does not remove files not under one of the specified directories" do
+ file.stubs(:key).returns 'unsynced/tmp/foo'
+ syncer.mirror = true
+
+ file.expects(:destroy).never
+
+ syncer.perform!
+ end
end
end
-
end
28 templates/cli/utility/archive
@@ -1,13 +1,25 @@
##
# Archive [Archive]
#
+ # Adding a file:
+ #
+ # archive.add "/path/to/a/file.rb"
+ #
+ # Adding a directory (including sub-directories):
+ #
+ # archive.add "/path/to/a/directory/"
+ #
+ # Excluding a file:
+ #
+ # archive.exclude "/path/to/an/excluded_file.rb"
+ #
+ # Excluding a directory (including sub-directories):
+ #
+ # archive.exclude "/path/to/an/excluded_directory/"
+ #
archive :my_archive do |archive|
- # add a file
- archive.add '/path/to/a/file.rb'
- # add a folder (including sub-folders)
- archive.add '/path/to/a/folder/'
- # exclude a file
- archive.exclude '/path/to/a/excluded_file.rb'
- # exclude a folder (including sub-folders)
- archive.exclude '/path/to/a/excluded_folder/'
+ archive.add "/path/to/a/file.rb"
+ archive.add "/path/to/a/folder/"
+ archive.exclude "/path/to/a/excluded_file.rb"
+ archive.exclude "/path/to/a/excluded_folder/"
end
6 templates/cli/utility/database/mongodb
@@ -8,11 +8,11 @@
db.host = "localhost"
db.port = 5432
db.ipv6 = false
- db.only_collections = ['only', 'these' 'collections']
+ db.only_collections = ["only", "these", "collections"]
db.additional_options = []
db.lock = false
# Optional: Use to set the location of these utilities
# if they cannot be found by their name in your $PATH
- # db.mongodump_utility = '/opt/local/bin/mongodump'
- # db.mongo_utility = '/opt/local/bin/mongo'
+ # db.mongodump_utility = "/opt/local/bin/mongodump"
+ # db.mongo_utility = "/opt/local/bin/mongo"
end
8 templates/cli/utility/database/mysql
@@ -8,10 +8,10 @@
db.host = "localhost"
db.port = 3306
db.socket = "/tmp/mysql.sock"
- db.skip_tables = ['skip', 'these', 'tables']
- db.only_tables = ['only', 'these' 'tables']
- db.additional_options = ['--quick', '--single-transaction']
+ db.skip_tables = ["skip", "these", "tables"]
+ db.only_tables = ["only", "these", "tables"]
+ db.additional_options = ["--quick", "--single-transaction"]
# Optional: Use to set the location of this utility
# if it cannot be found by name in your $PATH
- # db.mysqldump_utility = '/opt/local/bin/mysqldump'
+ # db.mysqldump_utility = "/opt/local/bin/mysqldump"
end
8 templates/cli/utility/database/postgresql
@@ -8,10 +8,10 @@
db.host = "localhost"
db.port = 5432
db.socket = "/tmp/pg.sock"
- db.skip_tables = ['skip', 'these', 'tables']
- db.only_tables = ['only', 'these' 'tables']
- db.additional_options = ['-xc', '-E=utf8']
+ db.skip_tables = ["skip", "these", "tables"]
+ db.only_tables = ["only", "these", "tables"]
+ db.additional_options = ["-xc", "-E=utf8"]
# Optional: Use to set the location of this utility
# if it cannot be found by name in your $PATH
- # db.pg_dump_utility = '/opt/local/bin/pg_dump'
+ # db.pg_dump_utility = "/opt/local/bin/pg_dump"
end
2  templates/cli/utility/database/redis
@@ -12,5 +12,5 @@
db.invoke_save = true
# Optional: Use to set the location of this utility
# if it cannot be found by name in your $PATH
- # db.redis_cli_utility = '/opt/local/bin/redis-cli'
+ # db.redis_cli_utility = "/opt/local/bin/redis-cli"
end
4 templates/cli/utility/encryptor/openssl
@@ -2,8 +2,8 @@
# OpenSSL [Encryptor]
#
encrypt_with OpenSSL do |encryption|
- encryption.password = 'my_password' # From String
- encryption.password_file = '/path/to/password/file' # Or from File
+ encryption.password = "my_password" # From String
+ encryption.password_file = "/path/to/password/file" # Or from File
encryption.base64 = true
encryption.salt = true
end
6 templates/cli/utility/notifier/campfire
@@ -6,7 +6,7 @@
campfire.on_warning = true
campfire.on_failure = true
- campfire.api_token = 'my_api_authentication_token'
- campfire.subdomain = 'my_subdomain'
- campfire.room_id = 'my_room_id'
+ campfire.api_token = "my_api_authentication_token"
+ campfire.subdomain = "my_subdomain"
+ campfire.room_id = "my_room_id"
end
12 templates/cli/utility/notifier/hipchat
@@ -6,10 +6,10 @@
hipchat.on_warning = true
hipchat.on_failure = true
- hipchat.token = 'token'
- hipchat.from = 'DB Backup'
- hipchat.rooms_notified = ['activity']
- hipchat.success_color = 'green'
- hipchat.warning_color = 'yellow'
- hipchat.failure_color = 'red'
+ hipchat.token = "token"
+ hipchat.from = "DB Backup"
+ hipchat.rooms_notified = ["activity"]
+ hipchat.success_color = "green"
+ hipchat.warning_color = "yellow"
+ hipchat.failure_color = "red"
end
14 templates/cli/utility/notifier/mail
@@ -10,13 +10,13 @@
mail.on_warning = true
mail.on_failure = true
- mail.from = 'sender@email.com'
- mail.to = 'receiver@email.com'
- mail.address = 'smtp.gmail.com'
+ mail.from = "sender@email.com"
+ mail.to = "receiver@email.com"
+ mail.address = "smtp.gmail.com"
mail.port = 587
- mail.domain = 'your.host.name'
- mail.user_name = 'sender@email.com'
- mail.password = 'my_password'
- mail.authentication = 'plain'
+ mail.domain = "your.host.name"
+ mail.user_name = "sender@email.com"
+ mail.password = "my_password"
+ mail.authentication = "plain"
mail.enable_starttls_auto = true
end
8 templates/cli/utility/notifier/presently
@@ -6,8 +6,8 @@
presently.on_warning = true
presently.on_failure = true
- presently.subdomain = 'my_subdomain'
- presently.user_name = 'my_user_name'
- presently.password = 'my_password'
- presently.group_id = 'my_group_id' # optional
+ presently.subdomain = "my_subdomain"
+ presently.user_name = "my_user_name"
+ presently.password = "my_password"
+ presently.group_id = "my_group_id" # optional
end
4 templates/cli/utility/notifier/prowl
@@ -6,6 +6,6 @@
prowl.on_warning = true
prowl.on_failure = true
- prowl.application = 'my_application'
- prowl.api_key = 'my_api_key'
+ prowl.application = "my_application"
+ prowl.api_key = "my_api_key"
end
8 templates/cli/utility/notifier/twitter
@@ -6,8 +6,8 @@
tweet.on_warning = true
tweet.on_failure = true
- tweet.consumer_key = 'my_consumer_key'
- tweet.consumer_secret = 'my_consumer_secret'
- tweet.oauth_token = 'my_oauth_token'
- tweet.oauth_token_secret = 'my_oauth_token_secret'
+ tweet.consumer_key = "my_consumer_key"
+ tweet.consumer_secret = "my_consumer_secret"
+ tweet.oauth_token = "my_oauth_token"
+ tweet.oauth_token_secret = "my_oauth_token_secret"
end
22 templates/cli/utility/storage/cloud_files
@@ -0,0 +1,22 @@
+ ##
+ # Rackspace Cloud Files [Storage]
+ #
+ # Available Auth URLs:
+ #
+ # - https://auth.api.rackspacecloud.com (US - Default)
+ # - https://lon.auth.api.rackspacecloud.com (UK)
+ #
+ # Servicenet:
+ #
+ # Set this to 'true' if Backup runs on a Rackspace server. It avoids
+ # transfer charges and improves performance.
+ #
+ store_with CloudFiles do |cf|
+ cf.api_key = "my_api_key"
+ cf.username = "my_username"
+ cf.container = "my_container"
+ cf.path = "/path/to/my/backups"
+ cf.keep = 5
+ cf.auth_url = "lon.auth.api.rackspacecloud.com"
+ cf.servicenet = false
+ end
12 templates/cli/utility/storage/cloudfiles
@@ -1,12 +0,0 @@
- ##
- # Rackspace Cloud Files [Storage]
- #
- store_with CloudFiles do |cf|
- cf.api_key = 'my_api_key'
- cf.username = 'my_username'
- cf.container = 'my_container'
- cf.path = '/path/to/my/backups'
- cf.keep = 5
- cf.auth_url = 'lon.auth.api.rackspacecloud.com'
- cf.servicenet = false
- end
25 templates/cli/utility/storage/dropbox
@@ -1,15 +1,20 @@
##
# Dropbox File Hosting Service [Storage]
- # Note: Initial backup must be performed manually to authorize
- # this machine with your Dropbox account.
+ #
+ # Access Type:
+ #
+ # - :app_folder (Default)
+ # - :dropbox
+ #
+ # Note:
+ #
+ # Initial backup must be performed manually to authorize
+ # this machine with your Dropbox account.
#
store_with Dropbox do |db|
- db.api_key = 'my_api_key'
- db.api_secret = 'my_api_secret'
- # Dropbox Access Type
- # The default value is :app_folder
- # Change this to :dropbox if needed
- # db.access_type = :dropbox
- db.path = '/path/to/my/backups'
- db.keep = 25
+ db.api_key = "my_api_key"
+ db.api_secret = "my_api_secret"
+ db.access_type = :app_folder
+ db.path = "/path/to/my/backups"
+ db.keep = 25
end
8 templates/cli/utility/storage/ftp
@@ -2,11 +2,11 @@
# FTP (File Transfer Protocol) [Storage]
#
store_with FTP do |server|
- server.username = 'my_username'
- server.password = 'my_password'
- server.ip = '123.45.678.90'
+ server.username = "my_username"
+ server.password = "my_password"
+ server.ip = "123.45.678.90"
server.port = 21
- server.path = '~/backups/'
+ server.path = "~/backups/"
server.keep = 5
server.passive_mode = false
end
2  templates/cli/utility/storage/local
@@ -2,6 +2,6 @@
# Local (Copy) [Storage]
#
store_with Local do |local|
- local.path = '~/backups/'
+ local.path = "~/backups/"
local.keep = 5
end
6 templates/cli/utility/storage/ninefold
@@ -2,8 +2,8 @@
# Ninefold Cloud Storage [Storage]
#
store_with Ninefold do |nf|
- nf.storage_token = 'my_storage_token'
- nf.storage_secret = 'my_storage_secret'
- nf.path = '/path/to/my/backups'
+ nf.storage_token = "my_storage_token"
+ nf.storage_secret = "my_storage_secret"
+ nf.path = "/path/to/my/backups"
nf.keep = 10
end
8 templates/cli/utility/storage/rsync
@@ -2,10 +2,10 @@
# RSync [Storage]
#
store_with RSync do |server|
- server.username = 'my_username'
- server.password = 'my_password'
- server.ip = '123.45.678.90'
+ server.username = "my_username"
+ server.password = "my_password"
+ server.ip = "123.45.678.90"
server.port = 22
- server.path = '~/backups/'
+ server.path = "~/backups/"
server.local = false
end
12 templates/cli/utility/storage/s3
@@ -10,10 +10,10 @@
# - us-west-1
#
store_with S3 do |s3|
- s3.access_key_id = 'my_access_key_id'
- s3.secret_access_key = 'my_secret_access_key'
- s3.region = 'us-east-1'
- s3.bucket = 'bucket-name'
- s3.path = '/path/to/my/backups'
- s3.keep = 10
+ s3.access_key_id = "my_access_key_id"
+ s3.secret_access_key = "my_secret_access_key"
+ s3.region = "us-east-1"
+ s3.bucket = "bucket-name"
+ s3.path = "/path/to/my/backups"
+ s3.keep = 10
end
8 templates/cli/utility/storage/scp
@@ -2,10 +2,10 @@
# SCP (Secure Copy) [Storage]
#
store_with SCP do |server|
- server.username = 'my_username'
- server.password = 'my_password'
- server.ip = '123.45.678.90'
+ server.username = "my_username"
+ server.password = "my_password"
+ server.ip = "123.45.678.90"
server.port = 22
- server.path = '~/backups/'
+ server.path = "~/backups/"
server.keep = 5
end
8 templates/cli/utility/storage/sftp
@@ -2,10 +2,10 @@
# SFTP (Secure File Transfer Protocol) [Storage]
#
store_with SFTP do |server|
- server.username = 'my_username'
- server.password = 'my_password'
- server.ip = '123.45.678.90'
+ server.username = "my_username"
+ server.password = "my_password"
+ server.ip = "123.45.678.90"
server.port = 22
- server.path = '~/backups/'
+ server.path = "~/backups/"
server.keep = 5
end
48 templates/cli/utility/syncer/cloud_files
@@ -0,0 +1,48 @@
+ ##
+ # Rackspace Cloud Files [Syncer]
+ #
+ # Available Auth URLs:
+ #
+ # - https://auth.api.rackspacecloud.com (US - Default)
+ # - https://lon.auth.api.rackspacecloud.com (UK)
+ #
+ # Servicenet:
+ #
+ # Set this to 'true' if Backup runs on a Rackspace server. It avoids
+ # transfer charges and improves performance.
+ #
+ # Mirroring:
+ #
+ # When enabled it will keep an exact mirror of your filesystem on Cloud Files. This means
+ # that when you remove a file from the filesystem, it will also remove it from Cloud Files.
+ #
+ # Concurrency (concurrency_type):
+ #
+ # - :threads (recommended)
+ # - :processes
+ # - false
+ #
+ # Threads are a good choice for sync operations like this and don't consume much additional memory.
+ # Using :processes is discouraged, as it's likely to consume a lot of memory.
+ #
+ # Concurrency (concurrency_level):
+ #
+ # Defaults to 2; the higher the level, the faster it will sync.
+ # If you want a high concurrency level (>2), use :threads and not :processes.
+ #
+ sync_with CloudFiles do |cf|
+ cf.username = "my_username"
+ cf.api_key = "my_api_key"
+ cf.container = "my_container"
+ cf.auth_url = "https://auth.api.rackspacecloud.com"
+ cf.servicenet = false
+ cf.path = "/backups"
+ cf.mirror = true
+ cf.concurrency_type = :threads
+ cf.concurrency_level = 50
+
+ cf.directories do |directory|
+ directory.add "/path/to/directory/to/sync"
+ directory.add "/path/to/other/directory/to/sync"
+ end
+ end
32 templates/cli/utility/syncer/s3
@@ -1,12 +1,42 @@
##
- # Amazon Simple Storage Service [Syncer]
+ # Amazon S3 [Syncer]
+ #
+ # Available Regions:
+ #
+ # - ap-northeast-1
+ # - ap-southeast-1
+ # - eu-west-1
+ # - us-east-1
+ # - us-west-1
+ #
+ # Mirroring:
+ #
+ # When enabled it will keep an exact mirror of your filesystem on S3. This means
+ # that when you remove a file from the filesystem, it will also remove it from S3.
+ #
+ # Concurrency (concurrency_type):
+ #
+ # - :threads (recommended)
+ # - :processes
+ # - false
+ #
+ # Threads are a good choice for sync operations like this and don't consume much additional memory.
+ # Using :processes is discouraged, as it's likely to consume a lot of memory.
+ #
+ # Concurrency (concurrency_level):
+ #
+ # Defaults to 2; the higher the level, the faster it will sync.
+ # If you want a high concurrency level (>2), use :threads and not :processes.
#
sync_with S3 do |s3|
s3.access_key_id = "my_access_key_id"
s3.secret_access_key = "my_secret_access_key"
s3.bucket = "my-bucket"
+ s3.region = "us-east-1"
s3.path = "/backups"
s3.mirror = true
+ s3.concurrency_type = :threads
+ s3.concurrency_level = 50
s3.directories do |directory|
directory.add "/path/to/directory/to/sync"
directory.add "/path/to/other/directory/to/sync"
end
end