Skip to content

Commit

Permalink
don't upload files bigger than 5G to s3. issue error instead.
Browse files Browse the repository at this point in the history
  • Loading branch information
vitaly committed Dec 21, 2009
1 parent e04e346 commit 57e7dcb
Show file tree
Hide file tree
Showing 3 changed files with 64 additions and 7 deletions.
6 changes: 5 additions & 1 deletion CHANGELOG
@@ -1,4 +1,8 @@
* fix typo in the template config file. (change option to options in pgdump)
* add example 'options' for tar in the template config file.
* do not try to upload more than 5G of data to S3. print error instead

0.2.5

Safety measure: Disable overwrite of existing configuration keys except for multi-value keys
* Safety measure: Disable overwrite of existing configuration keys except for multi-value keys
supported multi-value keys: skip_tables, exclude, files
60 changes: 54 additions & 6 deletions examples/unit/s3_example.rb
Expand Up @@ -15,14 +15,14 @@ def def_config
}
end

# Builds the default backup-description hash used by the specs.
# Entries in +extra+ are merged on top, overriding or extending the
# defaults (e.g. def_backup(:path => "foo") adds a :path key).
def def_backup(extra = {})
  {
    :kind => "_kind",
    :filename => "/backup/somewhere/_kind-_id.NOW.bar",
    :extension => ".bar",
    :id => "_id",
    :timestamp => "NOW"
  }.merge(extra)
end

def s3(config = def_config, backup = def_backup)
Expand Down Expand Up @@ -104,9 +104,57 @@ def s3(config = def_config, backup = def_backup)
end

describe :save do
  # Installs rr stubs for the named collaborators so each example only
  # mocks the single call it is asserting on.
  def add_stubs(*stubs)
    stubs.each do |s|
      case s
      when :connection
        stub(AWS::S3::Base).establish_connection!(:access_key_id => "_key", :secret_access_key => "_secret", :use_ssl => true)
      when :stat
        stub(File).stat("foo").stub!.size {123}
      when :create_bucket
        stub(AWS::S3::Bucket).create
      when :file_open
        # Simulate File.open yielding an opened file handle to the block.
        stub(File).open("foo") {|f, block| block.call(:opened_file)}
      when :s3_store
        stub(AWS::S3::S3Object).store(@full_path, :opened_file, "_bucket")
      end
    end
  end

  before(:each) do
    @s3 = s3(def_config, def_backup(:path => "foo"))
    @full_path = "_kind/_id/backup/somewhere/_kind-_id.NOW.bar.bar"
  end

  it "should fail if no backup.file is set" do
    @s3.backup.path = nil
    proc {@s3.send(:save)}.should raise_error(RuntimeError)
  end

  it "should establish s3 connection" do
    mock(AWS::S3::Base).establish_connection!(:access_key_id => "_key", :secret_access_key => "_secret", :use_ssl => true)
    add_stubs(:stat, :create_bucket, :file_open, :s3_store)
    @s3.send(:save)
  end

  it "should open local file" do
    add_stubs(:connection, :stat, :create_bucket)
    mock(File).open("foo")
    @s3.send(:save)
  end

  it "should upload file" do
    add_stubs(:connection, :stat, :create_bucket, :file_open)
    mock(AWS::S3::S3Object).store(@full_path, :opened_file, "_bucket")
    @s3.send(:save)
  end

  it "should fail on files bigger than 5G" do
    add_stubs(:connection)
    # One byte over the 5G S3 single-object limit.
    mock(File).stat("foo").stub!.size {5*1024*1024*1024+1}
    mock(STDERR).puts(anything)
    # The upload (wrapped in Benchmark.realtime) must never be attempted.
    dont_allow(Benchmark).realtime
    @s3.send(:save)
  end
end
end
5 changes: 5 additions & 0 deletions lib/astrails/safe/s3.rb
@@ -1,6 +1,7 @@
module Astrails
module Safe
class S3 < Sink
MAX_S3_FILE_SIZE = 5368709120

protected

Expand All @@ -20,6 +21,10 @@ def save

puts "Uploading #{bucket}:#{full_path}" if $_VERBOSE || $DRY_RUN
unless $DRY_RUN || $LOCAL
if File.stat(@backup.path).size > MAX_S3_FILE_SIZE
STDERR.puts "ERROR: File size exceeds maximum allowed for upload to S3 (#{MAX_S3_FILE_SIZE}): #{@backup.path}"
return
end
benchmark = Benchmark.realtime do
AWS::S3::Bucket.create(bucket)
File.open(@backup.path) do |file|
Expand Down

0 comments on commit 57e7dcb

Please sign in to comment.