Permalink
Browse files

spec-live

  • Loading branch information...
1 parent 603c46a commit cbc323513ea960951c64709cb7c56d4ebe3eb3a4 Brian D. Burns committed Jan 9, 2012
View
6 spec-live/.gitignore
@@ -0,0 +1,6 @@
+backups/config.yml
+backups/data
+backups/log
+backups/.cache
+backups/.tmp
+tmp/
View
7 spec-live/README
@@ -0,0 +1,7 @@
+== spec-live ==
+
+This folder contains "Live" specs which test various features against
+the actual filesystem and/or real storage service accounts.
+
+These are only intended to be used by developers.
+Use at your own risk :)
View
144 spec-live/backups/config.rb
@@ -0,0 +1,144 @@
+
##
# Archive Job
# Adds the backup library directory to the archive, leaving out the
# storage subdirectory. Passed to `archive` via &archive_job below.
archive_job = lambda do |archive|
  lib_dir = File.expand_path('../../../lib/backup', __FILE__)
  archive.add lib_dir
  archive.exclude File.join(lib_dir, 'storage')
end
+
+##
+# Configuration
+
# Local storage: archives are kept under the spec-live tmp path and
# only the two most recent runs are retained.
Backup::Configuration::Storage::Local.defaults do |storage|
  storage.keep = 2
  storage.path = Backup::SpecLive::TMP_PATH
end
+
# SSH operations can be tested against 'localhost'
# To do this, in the config.yml file:
# - set username/password for your current user
# - set ip to 'localhost'
# Although optional, it's recommended you set the 'path'
# to the same path as Backup::SpecLive::TMP_PATH
# i.e. '/absolute/path/to/spec-live/tmp'
# This way, cleaning the "remote path" can be skipped.
Backup::Configuration::Storage::SCP.defaults do |storage|
  # Fully qualify the constant for consistency with the rest of the
  # spec-live files (a bare `SpecLive` depends on the context this
  # config file happens to be evaluated in).
  opts = Backup::SpecLive::CONFIG['storage']['scp']

  storage.username = opts['username']
  storage.password = opts['password']
  storage.ip       = opts['ip']
  storage.port     = opts['port']
  storage.path     = opts['path']
  storage.keep     = 2
end
+
# Dropbox storage defaults, driven by backups/config.yml.
Backup::Configuration::Storage::Dropbox.defaults do |storage|
  # Fully qualified for consistency with the rest of the spec-live files.
  opts = Backup::SpecLive::CONFIG['storage']['dropbox']

  storage.api_key     = opts['api_key']
  storage.api_secret  = opts['api_secret']
  storage.access_type = opts['access_type']
  storage.path        = opts['path']
  storage.keep        = 2
end
+
# Mail notifier defaults, driven by backups/config.yml.
Backup::Configuration::Notifier::Mail.defaults do |notifier|
  # Fully qualified for consistency with the rest of the spec-live files.
  opts = Backup::SpecLive::CONFIG['notifier']['mail']

  # Notify on every outcome so each spec can verify delivery.
  notifier.on_success = true
  notifier.on_warning = true
  notifier.on_failure = true

  notifier.delivery_method = opts['delivery_method']
  notifier.from            = opts['from']
  notifier.to              = opts['to']
  notifier.address         = opts['address']
  notifier.port            = opts['port'] || 587
  notifier.domain          = opts['domain']
  notifier.user_name       = opts['user_name']
  notifier.password        = opts['password']
  notifier.authentication  = opts['authentication'] || 'plain'

  # BUG FIX: `opts['enable_starttls_auto'] || true` always evaluated to
  # true, so a configured `enable_starttls_auto: false` was silently
  # ignored. Default to true only when the option is absent (nil).
  starttls = opts['enable_starttls_auto']
  notifier.enable_starttls_auto = starttls.nil? ? true : starttls

  notifier.sendmail      = opts['sendmail']
  notifier.sendmail_args = opts['sendmail_args']
  # :file deliveries land in the spec tmp dir so specs can read them.
  notifier.mail_folder = Backup::SpecLive::TMP_PATH
end
+
# S3 cloud syncer defaults, driven by backups/config.yml.
Backup::Configuration::Syncer::Cloud::S3.defaults do |s3|
  # Fully qualified for consistency with the rest of the spec-live files.
  opts = Backup::SpecLive::CONFIG['syncer']['cloud']['s3']

  s3.access_key_id     = opts['access_key_id']
  s3.secret_access_key = opts['secret_access_key']
  s3.bucket            = opts['bucket']
  s3.region            = opts['region']
  s3.mirror            = true
end
+
##
# Models
#
# Each model below is registered under the trigger name the specs
# request via h_set_trigger (see ../spec_helper.rb).

# Archive stored with the Local storage (see Local.defaults above).
Backup::Model.new(:archive_local, 'test_label') do
  archive :test_archive, &archive_job
  store_with Local
end

# Archive stored over SCP (see SCP.defaults above).
Backup::Model.new(:archive_scp, 'test_label') do
  archive :test_archive, &archive_job
  store_with SCP
end

# To initialize the Dropbox session cache, run manually first using:
# VERBOSE=1 rspec spec-live/storage/dropbox_spec.rb --tag init
Backup::Model.new(:archive_dropbox, 'test_label') do
  archive :test_archive, &archive_job
  store_with Dropbox
end

# Gzip-compressed archive stored locally.
Backup::Model.new(:compressor_gzip_archive_local, 'test_label') do
  archive :test_archive, &archive_job
  compress_with Gzip
  store_with Local
end

# Mail notification using the configured delivery method
# (see Notifier::Mail defaults above).
Backup::Model.new(:notifier_mail, 'test_label') do
  notify_by Mail
end

# Mail notification delivered to a file under TMP_PATH,
# so specs can read the message body.
Backup::Model.new(:notifier_mail_file, 'test_label') do
  notify_by Mail do |mail|
    mail.to = 'test@backup'
    mail.delivery_method = :file
  end
end

# Cloud syncer specs: sync two local directories to S3...
Backup::Model.new(:syncer_cloud_s3, 'test_label') do
  sync_with Cloud::S3 do |s3|
    s3.directories do
      add File.join(Backup::SpecLive::SYNC_PATH, 'dir_a')
      add File.join(Backup::SpecLive::SYNC_PATH, 'dir_b')
    end
  end
end

# ...the same sync using 2 concurrent processes...
Backup::Model.new(:syncer_cloud_processes_s3, 'test_label') do
  sync_with Cloud::S3 do |s3|
    s3.concurrency_type = :processes
    s3.concurrency_level = 2
    s3.directories do
      add File.join(Backup::SpecLive::SYNC_PATH, 'dir_a')
      add File.join(Backup::SpecLive::SYNC_PATH, 'dir_b')
    end
  end
end

# ...and using 2 concurrent threads.
Backup::Model.new(:syncer_cloud_threads_s3, 'test_label') do
  sync_with Cloud::S3 do |s3|
    s3.concurrency_type = :threads
    s3.concurrency_level = 2
    s3.directories do
      add File.join(Backup::SpecLive::SYNC_PATH, 'dir_a')
      add File.join(Backup::SpecLive::SYNC_PATH, 'dir_b')
    end
  end
end
View
43 spec-live/backups/config.yml.template
@@ -0,0 +1,43 @@
+##
+# config.yml template
+# for usage, see:
+# spec-live/spec_helper.rb
+# spec-live/backups/config.rb
+##
+---
+storage:
+ scp:
+ specs_enabled: false
+ username: <current user name>
+ password: <password>
+ ip: localhost
+ port: 22
+ path: /absolute/path/to/spec-live/tmp
+ dropbox:
+ specs_enabled: false
+ api_key: <your key>
+ api_secret: <your secret>
+ path:
+ timeout:
+notifier:
+ mail:
+ specs_enabled: false
+ delivery_method: smtp
+ from: <from email>
+ to: <to email>
+ address: smtp.gmail.com
+ port: 587
+ user_name: <user name>
+ password: <password>
+ authentication: plain
+ enable_starttls_auto: true
+ sendmail:
+ sendmail_args:
+syncer:
+ cloud:
+ s3:
+ specs_enabled: false
+ access_key_id:
+ secret_access_key:
+ bucket:
+ region:
View
30 spec-live/compressor/gzip_spec.rb
@@ -0,0 +1,30 @@
+# encoding: utf-8
+
+require File.expand_path('../../spec_helper.rb', __FILE__)
+
describe 'Compressor::Gzip' do

  # Absolute path of the tar archive produced for the given model run.
  def archive_file_for(model)
    File.join(
      Backup::SpecLive::TMP_PATH,
      "#{model.trigger}", model.time, "#{model.trigger}.tar"
    )
  end

  # Table of contents of the model's archive.
  def archive_contents_for(model)
    archive_file = archive_file_for(model)
    # Quote the path so listing still works if TMP_PATH contains spaces.
    %x{ tar -tvf '#{archive_file}' }
  end

  it 'should compress an archive' do
    model = h_set_trigger('compressor_gzip_archive_local')
    model.perform!
    archive_file = archive_file_for(model)
    File.exist?(archive_file).should be_true
    # the inner archive should have been gzipped before being tar'd
    archive_contents_for(model).should match(
      /compressor_gzip_archive_local\/archives\/test_archive\.tar\.gz/
    )
    File.stat(archive_file).size.should be > 0
  end

end
View
85 spec-live/notifier/mail_spec.rb
@@ -0,0 +1,85 @@
+# encoding: utf-8
+
+require File.expand_path('../../spec_helper.rb', __FILE__)
+
describe 'Notifier::Mail',
    :if => Backup::SpecLive::CONFIG['notifier']['mail']['specs_enabled'] do

  # Deliveries via a real SMTP account (see backups/config.yml).
  describe 'Notifier::Mail :smtp' do
    let(:trigger) { 'notifier_mail' }

    it 'should send a success email' do
      model = h_set_trigger(trigger)
      expect do
        model.perform!
      end.not_to raise_error
    end

    it 'should send a warning email' do
      model = h_set_trigger(trigger)
      Backup::Logger.warn 'You have been warned!'
      expect do
        model.perform!
      end.not_to raise_error
    end

    it 'should send a failure email for non-fatal errors' do
      model = h_set_trigger(trigger)
      # a StandardError raised mid-run is rescued and reported by mail
      model.stubs(:databases).raises('A successful failure?')
      expect do
        model.perform!
      end.not_to raise_error
    end

    # description typo fixed: was 'failure email fatal errors'
    it 'should send a failure email for fatal errors' do
      model = h_set_trigger(trigger)
      # non-StandardError exceptions are reported, then re-raised
      model.stubs(:databases).raises(NoMemoryError, 'with increasing frequency...')
      expect do
        model.perform!
      end.to raise_error
    end
  end # describe 'Notifier::Mail :smtp'

  # Deliveries written to a file under TMP_PATH (delivery_method :file),
  # which lets the specs assert on the message body.
  describe 'Notifier::Mail :file' do
    let(:trigger) { 'notifier_mail_file' }
    let(:test_email) { File.join(Backup::SpecLive::TMP_PATH, 'test@backup') }

    it 'should send a success email' do
      model = h_set_trigger(trigger)
      expect do
        model.perform!
      end.not_to raise_error
      File.exist?(test_email).should be_true
      File.read(test_email).should match(/without any errors/)
    end

    it 'should send a warning email' do
      model = h_set_trigger(trigger)
      Backup::Logger.warn 'You have been warned!'
      expect do
        model.perform!
      end.not_to raise_error
      File.exist?(test_email).should be_true
      File.read(test_email).should match(/You have been warned/)
    end

    it 'should send a failure email for non-fatal errors' do
      model = h_set_trigger(trigger)
      model.stubs(:databases).raises('A successful failure?')
      expect do
        model.perform!
      end.not_to raise_error
      File.exist?(test_email).should be_true
      File.read(test_email).should match(/successful failure/)
    end

    # description typo fixed: was 'failure email fatal errors'
    it 'should send a failure email for fatal errors' do
      model = h_set_trigger(trigger)
      model.stubs(:databases).raises(NoMemoryError, 'with increasing frequency...')
      expect do
        model.perform!
      end.to raise_error
      File.exist?(test_email).should be_true
      File.read(test_email).should match(/with increasing frequency/)
    end
  end # describe 'Notifier::Mail :file'
end
View
85 spec-live/spec_helper.rb
@@ -0,0 +1,85 @@
+# encoding: utf-8
+
+##
+# Use Bundler
+require 'rubygems' if RUBY_VERSION < '1.9'
+require 'bundler/setup'
+
+##
+# Load Backup
+require 'backup'
+
module Backup
  module SpecLive
    PATH = File.expand_path('..', __FILE__)
    # to archive local backups, etc...
    TMP_PATH = PATH + '/tmp'
    SYNC_PATH = PATH + '/sync'

    # Live specs require real credentials; copy config.yml.template to
    # config.yml and enable/fill in the services you want to exercise.
    config = PATH + '/backups/config.yml'
    if File.exist?(config)
      CONFIG = YAML.load_file(config)
    else
      puts "The 'spec-live/backups/config.yml' file is required."
      puts "Use 'spec-live/backups/config.yml.template' to create one"
      exit!
    end

    # Helpers mixed into every example group (see RSpec.configure below).
    module ExampleHelpers

      # Resets the logger and model registry, reloads backups/config.rb
      # and returns the model registered under `trigger`. Also creates
      # the model's data path so cycling data can be written.
      def h_set_trigger(trigger)
        Backup::Logger.clear!
        Backup::Model.all.clear
        Backup::Config.load_config!
        FileUtils.mkdir_p(File.join(Backup::Config.data_path, trigger))
        Backup::Model.find(trigger)
      end

      # Wipes and recreates the data/log/tmp paths used by the specs,
      # guarding each path with h_safety_check first.
      def h_clean_data_paths!
        paths = [:data_path, :log_path, :tmp_path ].map do |name|
          Backup::Config.send(name)
        end + [Backup::SpecLive::TMP_PATH]
        paths.each do |path|
          h_safety_check(path)
          FileUtils.rm_rf(path)
          FileUtils.mkdir_p(path)
        end
      end

      # Refuses (and aborts the run) if asked to remove anything outside
      # the spec-live tree. Called before every destructive FileUtils op.
      def h_safety_check(path)
        # Rule #1: Do No Harm.
        unless (
          path.start_with?(Backup::SpecLive::PATH) &&
          Backup::SpecLive::PATH.end_with?('spec-live')
        ) || path.include?('spec_live_test_dir')
          warn "\nSafety Check Failed:\nPath: #{path}\n\n" +
              caller(1).join("\n")
          exit!
        end
      end

    end # ExampleHelpers
  end

  # All backup operations run relative to spec-live/backups.
  Config.update(:root_path => SpecLive::PATH + '/backups')

  # Silence Backup's console output unless VERBOSE is set.
  Logger.quiet = true unless ENV['VERBOSE']
end
+
##
# Use Mocha to mock with RSpec
require 'rspec'
RSpec.configure do |config|
  config.mock_with :mocha
  config.include Backup::SpecLive::ExampleHelpers
  config.before(:each) do
    # every example starts from freshly-cleaned data/log/tmp paths
    h_clean_data_paths!
    if ENV['VERBOSE']
      # extract the spec file path from the example block's #inspect
      # output so a banner can be printed before each example
      /spec-live\/(.*):/ =~ self.example.metadata[:example_group][:block].inspect
      puts "\n\nSPEC: #{$1}"
      puts "DESC: #{self.example.metadata[:full_description]}"
      puts '-' * 78
    end
  end
end

puts "\n\nRuby version: #{RUBY_DESCRIPTION}\n\n"
View
151 spec-live/storage/dropbox_spec.rb
@@ -0,0 +1,151 @@
+# encoding: utf-8
+
+require File.expand_path('../../spec_helper.rb', __FILE__)
+
describe 'Storage::Dropbox',
    :if => Backup::SpecLive::CONFIG['storage']['dropbox']['specs_enabled'] do
  let(:trigger) { 'archive_dropbox' }

  # Full remote paths of the files this storage transferred for `package`.
  def remote_files_for(storage, package)
    remote_path = storage.send(:remote_path_for, package)

    files = []
    storage.send(:transferred_files_for, package) do |local_file, remote_file|
      files << File.join(remote_path, remote_file)
    end
    files
  end

  # Asserts the remote folder contents match the files recorded for
  # `package`; with `expectation` false, asserts nothing was found.
  def check_remote_for(storage, package, expectation = true)
    remote_path = storage.send(:remote_path_for, package)

    # search the remote_path folder for the trigger (base file name)
    metadata = storage.send(:connection).search(
      remote_path, package.trigger
    )
    files_found = metadata.map {|entry| File.basename(entry['path']) }

    files = remote_files_for(storage, package).map {|file| File.basename(file) }

    if expectation
      files_found.sort.should == files.sort
    else
      files_found.should be_empty
    end
  end

  # Removes the package's folder from the Dropbox account.
  def clean_remote!(storage, package)
    storage.send(:remove!, package)
  end

  # Tagged :init so it can be run alone to seed the Dropbox session cache:
  #   VERBOSE=1 rspec spec-live/storage/dropbox_spec.rb --tag init
  it 'should store the archive on the remote', :init => true do
    model = h_set_trigger(trigger)

    model.perform!

    storage = model.storages.first
    package = model.package
    files = remote_files_for(storage, package)
    files.count.should == 1

    check_remote_for(storage, package)

    clean_remote!(storage, package)
  end

  describe 'Storage::Dropbox Cycling' do
    context 'when archives exceed `keep` setting' do
      it 'should remove the oldest archive' do
        packages = []

        # `keep` is 2 (see backups/config.rb), so the third run below
        # should cycle the first package off the remote.
        model = h_set_trigger(trigger)
        storage = model.storages.first
        model.perform!
        package = model.package
        package.filenames.count.should == 1
        packages << package
        sleep 1   # ensure distinct timestamped remote paths

        check_remote_for(storage, packages[0])

        model = h_set_trigger(trigger)
        storage = model.storages.first
        model.perform!
        package = model.package
        package.filenames.count.should == 1
        packages << package
        sleep 1

        check_remote_for(storage, packages[1])

        model = h_set_trigger(trigger)
        storage = model.storages.first
        model.perform!
        package = model.package
        package.filenames.count.should == 1
        packages << package

        check_remote_for(storage, packages[2])
        clean_remote!(storage, packages[2])

        check_remote_for(storage, packages[1])
        clean_remote!(storage, packages[1])

        # the oldest package should have been cycled off the remote
        check_remote_for(storage, packages[0], false)
      end
    end

    context 'when an archive to be removed does not exist' do
      it 'should log a warning and continue' do
        packages = []

        model = h_set_trigger(trigger)
        storage = model.storages.first
        model.perform!
        package = model.package
        package.filenames.count.should == 1
        packages << package
        sleep 1

        check_remote_for(storage, packages[0])

        model = h_set_trigger(trigger)
        storage = model.storages.first
        model.perform!
        package = model.package
        package.filenames.count.should == 1
        packages << package

        check_remote_for(storage, packages[1])

        # remove archive directory cycle! will attempt to remove
        clean_remote!(storage, packages[0])

        check_remote_for(storage, packages[0], false)

        check_remote_for(storage, packages[1])


        model = h_set_trigger(trigger)
        storage = model.storages.first
        expect do
          model.perform!
        end.not_to raise_error

        # removal of the missing package should only warn, not fail
        Backup::Logger.has_warnings?.should be_true

        package = model.package
        package.filenames.count.should == 1
        packages << package

        check_remote_for(storage, packages[1])
        clean_remote!(storage, packages[1])

        check_remote_for(storage, packages[2])
        clean_remote!(storage, packages[2])
      end
    end

  end # describe 'Storage::Dropbox Cycling'

end
View
83 spec-live/storage/local_spec.rb
@@ -0,0 +1,83 @@
+# encoding: utf-8
+
+require File.expand_path('../../spec_helper.rb', __FILE__)
+
describe 'Storage::Local' do
  let(:trigger) { 'archive_local' }

  # Absolute path of the tar archive the given model run produced.
  def archive_file_for(model)
    File.join(
      Backup::SpecLive::TMP_PATH,
      "#{model.trigger}", model.time, "#{model.trigger}.tar"
    )
  end

  it 'should store a local archive' do
    model = h_set_trigger(trigger)
    model.perform!
    File.exist?(archive_file_for(model)).should be_true
  end

  describe 'Storage::Local Cycling' do

    context 'when archives exceed `keep` setting' do
      it 'should remove the oldest archive' do
        archives = []

        # `keep` is 2 (see backups/config.rb), so the third run
        # should cycle out the first archive.
        model = h_set_trigger(trigger)
        model.perform!
        archives << archive_file_for(model)
        sleep 1   # ensure distinct timestamped paths

        model = h_set_trigger(trigger)
        model.perform!
        archives << archive_file_for(model)
        sleep 1

        model = h_set_trigger(trigger)
        model.perform!
        archives << archive_file_for(model)

        File.exist?(archives[0]).should be_false
        File.exist?(archives[1]).should be_true
        File.exist?(archives[2]).should be_true
      end
    end

    context 'when an archive to be removed does not exist' do
      it 'should log a warning and continue' do
        archives = []

        model = h_set_trigger(trigger)
        model.perform!
        archives << archive_file_for(model)
        sleep 1

        model = h_set_trigger(trigger)
        model.perform!
        archives << archive_file_for(model)
        sleep 1

        File.exist?(archives[0]).should be_true
        File.exist?(archives[1]).should be_true
        # remove archive directory cycle! will attempt to remove
        # (File.dirname replaces the hand-rolled split/join)
        dir = File.dirname(archives[0])
        h_safety_check(dir)
        FileUtils.rm_r(dir)
        File.exist?(archives[0]).should be_false

        expect do
          model = h_set_trigger(trigger)
          model.perform!
          archives << archive_file_for(model)
        end.not_to raise_error

        # the missing archive should produce a warning, not an error
        Backup::Logger.has_warnings?.should be_true

        File.exist?(archives[1]).should be_true
        File.exist?(archives[2]).should be_true
      end
    end

  end # describe 'Storage::Local Cycling'
end
View
193 spec-live/storage/scp_spec.rb
@@ -0,0 +1,193 @@
+# encoding: utf-8
+
+require File.expand_path('../../spec_helper.rb', __FILE__)
+
describe 'Storage::SCP',
    :if => Backup::SpecLive::CONFIG['storage']['scp']['specs_enabled'] do
  let(:trigger) { 'archive_scp' }

  # Full remote paths of the files this storage transferred for `package`.
  def remote_files_for(storage, package)
    remote_path = storage.send(:remote_path_for, package)

    files = []
    storage.send(:transferred_files_for, package) do |local_file, remote_file|
      files << File.join(remote_path, remote_file)
    end
    files
  end

  # True when every file in `files` exists on the remote. When testing
  # against 'localhost' as the current user, checks the filesystem
  # directly; otherwise runs `ls` over the SSH connection and treats
  # any stderr output as a missing file.
  def check_remote_for(storage, files)
    if (storage.username == Backup::Config.user) &&
        (storage.ip == 'localhost')
      files.each do |file|
        if !File.exist?(file)
          return false
        end
      end
      true
    else
      errors = []
      storage.send(:connection) do |ssh|
        files.each do |file|
          ssh.exec!("ls '#{file}'") do |ch, stream, data|
            errors << data if stream == :stderr
          end
        end
      end
      errors.empty?
    end
  end

  # Removes the package's remote directory. Skipped when the "remote"
  # is the local TMP_PATH, since the spec helpers already clean that.
  def clean_remote!(storage, package)
    return if (storage.username == Backup::Config.user) &&
        (storage.ip == 'localhost') &&
        (storage.path == Backup::SpecLive::TMP_PATH)

    remote_path = storage.send(:remote_path_for, package)
    h_safety_check(remote_path)
    storage.send(:connection) do |ssh|
      ssh.exec!("rm -r '#{remote_path}'")
    end
  end

  it 'should store the archive on the remote' do
    model = h_set_trigger(trigger)

    model.perform!

    storage = model.storages.first
    package = model.package
    files = remote_files_for(storage, package)
    files.count.should == 1

    check_remote_for(storage, files).should be_true

    clean_remote!(storage, package)
  end

  describe 'Storage::SCP Cycling' do
    context 'when archives exceed `keep` setting' do
      it 'should remove the oldest archive' do
        packages = []

        # `keep` is 2 (see backups/config.rb), so the third run below
        # should cycle the first package off the remote.
        model = h_set_trigger(trigger)
        storage = model.storages.first
        model.perform!
        package = model.package
        package.filenames.count.should == 1
        packages << package
        sleep 1   # ensure distinct timestamped remote paths

        check_remote_for(
          storage, remote_files_for(storage, packages[0])
        ).should be_true

        model = h_set_trigger(trigger)
        storage = model.storages.first
        model.perform!
        package = model.package
        package.filenames.count.should == 1
        packages << package
        sleep 1

        check_remote_for(
          storage, remote_files_for(storage, packages[1])
        ).should be_true

        model = h_set_trigger(trigger)
        storage = model.storages.first
        model.perform!
        package = model.package
        package.filenames.count.should == 1
        packages << package

        check_remote_for(
          storage, remote_files_for(storage, packages[2])
        ).should be_true
        clean_remote!(storage, packages[2])

        check_remote_for(
          storage, remote_files_for(storage, packages[1])
        ).should be_true
        clean_remote!(storage, packages[1])

        # the oldest package should have been cycled off the remote
        check_remote_for(
          storage, remote_files_for(storage, packages[0])
        ).should be_false
      end
    end

    context 'when an archive to be removed does not exist' do
      it 'should log a warning and continue' do
        packages = []

        model = h_set_trigger(trigger)
        storage = model.storages.first
        model.perform!
        package = model.package
        package.filenames.count.should == 1
        packages << package
        sleep 1

        check_remote_for(
          storage, remote_files_for(storage, packages[0])
        ).should be_true

        model = h_set_trigger(trigger)
        storage = model.storages.first
        model.perform!
        package = model.package
        package.filenames.count.should == 1
        packages << package

        check_remote_for(
          storage, remote_files_for(storage, packages[1])
        ).should be_true

        # remove archive directory cycle! will attempt to remove

        # (clean_remote! is a no-op for the localhost/TMP_PATH setup,
        # so remove the directory directly in that case)
        if (storage.username == Backup::Config.user) &&
            (storage.ip == 'localhost')
          remote_path = storage.send(:remote_path_for, packages[0])
          h_safety_check(remote_path)
          FileUtils.rm_r(remote_path)
        else
          clean_remote!(storage, packages[0])
        end

        check_remote_for(
          storage, remote_files_for(storage, packages[0])
        ).should be_false

        check_remote_for(
          storage, remote_files_for(storage, packages[1])
        ).should be_true


        model = h_set_trigger(trigger)
        storage = model.storages.first
        expect do
          model.perform!
        end.not_to raise_error

        # removal of the missing package should only warn, not fail
        Backup::Logger.has_warnings?.should be_true

        package = model.package
        package.filenames.count.should == 1
        packages << package

        check_remote_for(
          storage, remote_files_for(storage, packages[1])
        ).should be_true
        clean_remote!(storage, packages[1])

        check_remote_for(
          storage, remote_files_for(storage, packages[2])
        ).should be_true
        clean_remote!(storage, packages[2])
      end
    end

  end # describe 'Storage::SCP Cycling'

end
View
124 spec-live/syncer/cloud/s3_spec.rb
@@ -0,0 +1,124 @@
+# encoding: utf-8
+
+require File.expand_path('../../../spec_helper.rb', __FILE__)
+
describe 'Syncer::Cloud::S3 - No Concurrency',
    :if => Backup::SpecLive::CONFIG['syncer']['cloud']['s3']['specs_enabled'] do
  let(:trigger) { 'syncer_cloud_s3' }
  let(:model) { h_set_trigger(trigger) }

  before do
    model # trigger model initialization so Fog is available
    create_sync_files
    clean_remote
  end

  after do
    clean_sync_dir
    clean_remote
  end

  it 'should work' do
    model.perform!
    remote_files.map {|file| [file.key, file.etag] }.sort.should == [
      ["backups/dir_a/one.file", "d3b07384d113edec49eaa6238ad5ff00"],
      ["backups/dir_b/dir_c/three.file", "d3b07384d113edec49eaa6238ad5ff00"],
      ["backups/dir_b/two.file", "d3b07384d113edec49eaa6238ad5ff00"]
    ]

    update_sync_files

    model.perform!
    remote_files.map {|file| [file.key, file.etag] }.sort.should == [
      ["backups/dir_a/dir_d/two.new", "14758f1afd44c09b7992073ccf00b43d"],
      ["backups/dir_a/one.file", "14758f1afd44c09b7992073ccf00b43d"],
      ["backups/dir_b/dir_c/three.file", "d3b07384d113edec49eaa6238ad5ff00"],
      ["backups/dir_b/one.new", "14758f1afd44c09b7992073ccf00b43d"]
    ]
  end

  private

  ##
  # Initial Files are MD5: d3b07384d113edec49eaa6238ad5ff00
  #
  # ├── dir_a
  # │   └── one.file
  # └── dir_b
  #     ├── dir_c
  #     │   └── three.file
  #     ├── bad\xFFfile
  #     └── two.file
  def create_sync_files
    clean_sync_dir

    %w{ dir_a dir_b/dir_c }.each do |dir|
      path = File.join(Backup::SpecLive::SYNC_PATH, dir)
      FileUtils.mkdir_p(path)
    end

    %W{ dir_a/one.file
        dir_b/two.file
        dir_b/bad\xFFfile
        dir_b/dir_c/three.file }.each do |file|
      path = File.join(Backup::SpecLive::SYNC_PATH, file)
      # inner block param renamed to `f` — `|file|` shadowed the
      # enclosing iteration variable
      File.open(path, 'w') {|f| f.puts 'foo' }
    end
  end

  ##
  # Added/Updated Files are MD5: 14758f1afd44c09b7992073ccf00b43d
  #
  # ├── dir_a
  # │   ├── dir_d (add)
  # │   │   └── two.new (add)
  # │   └── one.file (update)
  # └── dir_b
  #     ├── dir_c
  #     │   └── three.file
  #     ├── bad\377file
  #     ├── one.new (add)
  #     └── two.file (remove)
  def update_sync_files
    FileUtils.mkdir_p(File.join(Backup::SpecLive::SYNC_PATH, 'dir_a/dir_d'))
    %w{ dir_a/one.file
        dir_b/one.new
        dir_a/dir_d/two.new }.each do |file|
      path = File.join(Backup::SpecLive::SYNC_PATH, file)
      # inner block param renamed to `f` to avoid shadowing `file`
      File.open(path, 'w') {|f| f.puts 'foobar' }
    end

    path = File.join(Backup::SpecLive::SYNC_PATH, 'dir_b/two.file')
    h_safety_check(path)
    FileUtils.rm(path)
  end

  # Removes the local sync tree (guarded by h_safety_check).
  def clean_sync_dir
    path = Backup::SpecLive::SYNC_PATH
    if File.directory?(path)
      h_safety_check(path)
      FileUtils.rm_r(path)
    end
  end

  # use a new connection for each request
  # NOTE: also memoizes @opts, which remote_files below relies on
  def connection
    @opts = Backup::SpecLive::CONFIG['syncer']['cloud']['s3']
    Fog::Storage.new(
      :provider => 'AWS',
      :aws_access_key_id => @opts['access_key_id'],
      :aws_secret_access_key => @opts['secret_access_key'],
      :region => @opts['region']
    )
  end

  # All objects under the 'backups' prefix in the configured bucket.
  def remote_files
    bucket = connection.directories.get(@opts['bucket'])
    bucket.files.all(:prefix => 'backups')
  end

  # Deletes every synced object from the bucket.
  def clean_remote
    remote_files.each {|file| file.destroy }
  end

end

0 comments on commit cbc3235

Please sign in to comment.