Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions backup.gemspec
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ Gem::Specification.new do |gem|

gem.add_dependency "thor", "~> 0.18", ">= 0.18.1"
gem.add_dependency "open4", "1.3.0"
gem.add_dependency "fog", "~> 1.28"
gem.add_dependency "fog", "~> 1.42"
gem.add_dependency "excon", "~> 0.44"
gem.add_dependency "unf", "0.1.3" # for fog/AWS
gem.add_dependency "dropbox-sdk", "1.6.5"
Expand All @@ -38,7 +38,7 @@ Gem::Specification.new do |gem|
gem.add_dependency "net-sftp", "2.1.2"
gem.add_dependency "mail", "~> 2.6", ">= 2.6.6"
gem.add_dependency "pagerduty", "2.0.0"
gem.add_dependency "twitter", "~> 5.5"
gem.add_dependency "twitter", "~> 6"
gem.add_dependency "hipchat", "1.0.1"
gem.add_dependency "flowdock", "0.4.0"
gem.add_dependency "dogapi", "1.11.0"
Expand Down
1 change: 1 addition & 0 deletions lib/backup.rb
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ module Storage
autoload :Base, File.join(STORAGE_PATH, "base")
autoload :Cycler, File.join(STORAGE_PATH, "cycler")
autoload :S3, File.join(STORAGE_PATH, "s3")
autoload :Swift, File.join(STORAGE_PATH, "swift")
autoload :CloudFiles, File.join(STORAGE_PATH, "cloud_files")
autoload :Ninefold, File.join(STORAGE_PATH, "ninefold")
autoload :Dropbox, File.join(STORAGE_PATH, "dropbox")
Expand Down
96 changes: 96 additions & 0 deletions lib/backup/cloud_io/swift.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
require "backup/cloud_io/base"
require "fog/openstack"

# Maximum size (in bytes) of a single non-segmented Swift object upload:
# 5 GiB minus one byte.
# NOTE(review): this is defined at the top level, so it lands in the global
# namespace instead of under Backup::CloudIO::Swift — consider nesting it
# inside the class to avoid polluting Object.
LARGE_FILE = 5 * 1024**3 - 1

module Backup
  module CloudIO
    # Thin wrapper around fog-openstack's Swift storage API, used by
    # Storage::Swift to upload, list and delete backup packages.
    class Swift < Base
      class Error < Backup::Error; end

      # Handle the fog-openstack namespace change: everything moved under
      # the Fog::OpenStack namespace starting with fog-openstack 1.0.
      Storage = if Fog::Storage.const_defined? :OpenStack
                  Fog::Storage::OpenStack
                else
                  Fog::OpenStack::Storage
                end

      attr_reader :username, :password, :tenant, :region,
        :container, :auth_url, :max_retries,
        :retry_waitsec, :fog_options, :batch_size

      # @param opts [Hash] expects:
      #   :username, :password, :tenant_name, :container, :auth_url,
      #   :region, :max_retries, :retry_waitsec, :batch_size, :fog_options
      def initialize(opts = {})
        super

        @username      = opts[:username]
        @password      = opts[:password]
        @tenant        = opts[:tenant_name]
        @container     = opts[:container]
        @auth_url      = opts[:auth_url]
        @region        = opts[:region]
        @max_retries   = opts[:max_retries]
        @retry_waitsec = opts[:retry_waitsec]
        @batch_size    = opts[:batch_size]
        @fog_options   = opts[:fog_options]
      end

      # Uploads the local file +src+ to the container as object +dest+.
      #
      # Swift rejects single-request uploads of 5 GiB or more, so anything
      # above LARGE_FILE raises FileSizeError before any network I/O.
      # (Segmented/SLO uploads are not implemented here.)
      def upload(src, dest)
        file_size = File.size(src)

        raise FileSizeError, <<-EOS if file_size > LARGE_FILE
          File Too Large
          File: #{src}
          Size: #{file_size}
          Max Swift Upload Size is #{LARGE_FILE} (5 GiB)
        EOS

        # Block form guarantees the file handle is closed even if the
        # request raises; the non-block File.open leaked the descriptor.
        File.open(src) do |file|
          directory.files.create(key: dest, body: file)
        end
      end

      # Deletes the given objects (fog file models or String keys),
      # issuing bulk-delete requests in slices of +batch_size+.
      # Each batch is retried per max_retries / retry_waitsec.
      def delete(objects_or_keys)
        keys = Array(objects_or_keys).dup
        keys = keys.map(&:key) unless keys.first.is_a?(String)

        until keys.empty?
          batch = keys.slice!(0, batch_size)
          with_retries("DELETE Multiple Objects") do
            resp = connection.delete_multiple_objects(container, batch)
            # NOTE(review): a 200 bulk-delete response may still report
            # per-object failures in its body — confirm whether those
            # need inspecting for this deployment.
            if resp.data[:status] != 200
              raise Error, <<-EOS
                Failed to delete.
                Status = #{resp.data[:status]}
                Reason = #{resp.data[:reason_phrase]}
                Body = #{resp.data[:body]}
              EOS
            end
          end
        end
      end

      # Lists objects under +prefix+. A trailing "/" is ensured so that
      # a prefix of "foo" does not also match "foobar/...".
      def objects(prefix)
        directory.files.all(prefix: prefix.chomp("/") + "/")
      end

      private

      # Memoized fog directory (container) model.
      # NOTE(review): connection.directories.get returns nil when the
      # container does not exist — callers would then see NoMethodError.
      def directory
        @directory ||= connection.directories.get container
      end

      # Memoized fog storage connection, built from the credentials given
      # at construction; region/tenant are only passed when configured,
      # and fog_options may override any of the computed settings.
      def connection
        @connection ||= begin
          opts = {
            openstack_auth_url: auth_url,
            openstack_username: username,
            openstack_api_key: password
          }
          opts[:openstack_region] = region unless region.nil?
          opts[:openstack_tenant] = tenant unless tenant.nil?

          opts.merge!(fog_options || {})
          Storage.new(opts)
        end
      end
    end
  end
end
2 changes: 1 addition & 1 deletion lib/backup/config/dsl.rb
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ def add_dsl_constants
[ # Databases
["MySQL", "PostgreSQL", "MongoDB", "Redis", "Riak", "OpenLDAP", "SQLite"],
# Storages
["S3", "CloudFiles", "Dropbox", "FTP",
["S3", "CloudFiles", "Dropbox", "FTP", "Swift",
"SFTP", "SCP", "RSync", "Local", "Qiniu"],
# Compressors
["Gzip", "Bzip2", "Custom"],
Expand Down
124 changes: 124 additions & 0 deletions lib/backup/storage/swift.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,124 @@
require "backup/cloud_io/swift"

module Backup
  module Storage
    # Stores backup packages in an OpenStack Swift container via
    # CloudIO::Swift, with cycling support for expiring old packages.
    class Swift < Base
      include Storage::Cycler
      class Error < Backup::Error; end

      ##
      # Swift credentials
      attr_accessor :username, :password

      ##
      # Keystone tenant name if using v2 auth
      attr_accessor :tenant_name

      ##
      # Swift container name
      attr_accessor :container

      ##
      # Swift region. It might be required for certain providers
      attr_accessor :region

      ##
      # OpenStack keystone url
      attr_accessor :auth_url

      ##
      # Number of times to retry failed operations.
      #
      # Default: 10
      attr_accessor :max_retries

      ##
      # Time in seconds to pause before each retry.
      #
      # Default: 30
      attr_accessor :retry_waitsec

      ##
      # The size of the batch operations (delete/list/etc.) in your
      # OpenStack deployment
      #
      # Default: 1000
      attr_accessor :batch_size

      ##
      # Additional options to pass along to fog.
      # e.g. Fog::Storage.new({ :provider => 'Swift' }.merge(fog_options))
      attr_accessor :fog_options

      def initialize(mode, storage_id = nil)
        super

        # Fill in defaults for anything the model's config block left unset.
        @max_retries   ||= 10
        @retry_waitsec ||= 30
        @path          ||= "backups"
        @batch_size    ||= 1000
        @fog_options   ||= {}

        # Remote paths are container-relative, so drop any leading slash.
        @path = @path.sub(%r{^/}, "")

        check_configuration
      end

      private

      # Lazily-built CloudIO adapter carrying all connection settings.
      def cloud_io
        @cloud_io ||= begin
          options = {
            username: username,
            password: password,
            tenant_name: tenant_name,
            region: region,
            container: container,
            auth_url: auth_url,
            max_retries: max_retries,
            retry_waitsec: retry_waitsec,
            batch_size: batch_size,
            fog_options: fog_options
          }
          CloudIO::Swift.new(options)
        end
      end

      # Uploads every file in the current package to the container.
      def transfer!
        package.filenames.each do |filename|
          local_file  = File.join(Config.tmp_path, filename)
          remote_file = File.join(remote_path, filename)
          Logger.info "Storing '#{container}/#{remote_file}'..."
          cloud_io.upload(local_file, remote_file)
        end
      end

      # Called by the Cycler.
      # Any error raised will be logged as a warning.
      def remove!(package)
        Logger.info "Removing backup package dated #{package.time}..."

        remote_path = remote_path_for(package)
        stored = cloud_io.objects(remote_path)
        raise Error, "Package at '#{remote_path}' not found" if stored.empty?

        cloud_io.delete(stored)
      end

      # Validates required settings; tenant_name is additionally required
      # when the auth_url points at a Keystone v2 endpoint.
      def check_configuration
        raise Error, <<-EOS if auth_url.nil?
          Configuration Error
          Swift auth_url is required
        EOS

        required = [:username, :password, :container, :auth_url]
        required << :tenant_name if auth_url =~ /v2/

        any_missing = required.map { |name| send(name) }.any?(&:nil?)
        raise Error, <<-EOS if any_missing
          Configuration Error
          #{required.map { |name| "##{name}" }.join(", ")} are all required
        EOS
      end
    end
  end
end
90 changes: 90 additions & 0 deletions spec/cloud_io/swift_spec.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
require File.expand_path("../../spec_helper.rb", __FILE__)
require "backup/cloud_io/swift"

module Backup
  describe CloudIO::Swift do
    let(:connection) { mock }
    let(:directory) { mock }
    let(:files) { mock }
    let(:fd) { mock }
    let(:response) { mock }
    let(:cloud_io) do
      CloudIO::Swift.new(container: "my_bucket", batch_size: 5, max_retries: 0)
    end

    describe "#upload" do
      context "when file is larger than 5GB" do
        before do
          File.expects(:size).with("/src/file").returns(5 * 1024**3)
        end

        it "raises an error" do
          expect do
            cloud_io.upload("/src/file", "idontcare")
          end.to raise_error CloudIO::FileSizeError
        end
      end

      context "when file is smaller than 5GB" do
        before do
          File.expects(:size).with("/src/file").returns(512)
          File.expects(:open).with("/src/file").returns(fd)
        end

        # Fixed typo in description: "class" -> "calls".
        it "calls #create on the directory" do
          cloud_io.expects(:directory).returns(directory)
          directory.expects(:files).returns(files)
          files.expects(:create).with(key: "/dst/file", body: fd)

          cloud_io.upload("/src/file", "/dst/file")
        end
      end
    end

    describe "#objects" do
      # Fixed typo in description: "call" -> "calls".
      it "calls #files on the container model" do
        cloud_io.expects(:directory).twice.returns(directory)
        directory.expects(:files).twice.returns(files)
        files.expects(:all).twice.with(prefix: "/prefix/")

        cloud_io.objects("/prefix")
        cloud_io.objects("/prefix/")
      end
    end

    describe "#delete" do
      # NOTE(review): the original also defined an unused `key_10` let
      # (10 keys), presumably for a batch-splitting test that was never
      # written; the dead definition has been removed.
      let(:key_1) { ["file/path"] }
      before do
        cloud_io.expects(:connection).returns(connection)
      end

      it "calls connection#delete_multiple_objects" do
        connection.expects(:delete_multiple_objects)
          .with("my_bucket", key_1)
          .returns(response)
        response.expects(:data).returns(status: 200)

        expect { cloud_io.delete key_1 }.to_not raise_error
      end

      it "raises an error if status != 200" do
        response.expects(:data).at_least(1).returns(
          status: 503,
          reason_phrase: "give me a reason",
          body: "bodybody"
        )
        connection.expects(:delete_multiple_objects)
          .with("my_bucket", key_1)
          .returns(response)

        expect { cloud_io.delete key_1 }.to raise_error { |err|
          expect(err.message).to match(/Failed to delete/)
          expect(err.message).to match(/503/)
          expect(err.message).to match(/give me a reason/)
          expect(err.message).to match(/bodybody/)
        }
      end
    end
  end
end
Loading