Skip to content
This repository has been archived by the owner on Feb 12, 2024. It is now read-only.

Last-Modified and RightAws::Key::exists? changes #9

Open
wants to merge 14 commits into
base: master
Choose a base branch
from
3 changes: 2 additions & 1 deletion lib/fakes3/bucket.rb
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@


module FakeS3 module FakeS3
class Bucket class Bucket
attr_accessor :name,:creation_date,:objects attr_accessor :name,:creation_date,:objects,:opened


def initialize(name,creation_date,objects) def initialize(name,creation_date,objects)
@name = name @name = name
Expand All @@ -14,6 +14,7 @@ def initialize(name,creation_date,objects)
objects.each do |obj| objects.each do |obj|
@objects.add(obj) @objects.add(obj)
end end
@opened = false
@mutex = Mutex.new @mutex = Mutex.new
end end


Expand Down
87 changes: 47 additions & 40 deletions lib/fakes3/file_store.rb
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -53,6 +53,35 @@ def get_bucket(bucket)
@bucket_hash[bucket] @bucket_hash[bucket]
end end


# Build a SortedObjectList holding every object stored on disk under the
# given bucket, found by recursively walking the bucket's directory tree.
#
# bucket - Bucket whose stored objects should be enumerated.
#
# Returns a SortedObjectList of the bucket's S3Object instances.
def get_sorted_object_list(bucket)
  list = SortedObjectList.new
  # `each` instead of `for`: avoids leaking the loop variable into the
  # enclosing scope and is the idiomatic Ruby iteration form.
  get_objects_under_path(bucket, "").each do |object|
    list.add(object)
  end
  list
end

# Recursively collect all objects stored beneath +path+ inside +bucket+'s
# on-disk directory tree. A directory is treated as a stored object when it
# contains a SHUCK_METADATA_DIR child; otherwise it is descended into.
#
# bucket - Bucket being scanned (its name locates the tree under @root).
# path   - String path relative to the bucket root ("" for the whole bucket).
#
# Returns an Array of S3Object instances (duplicates unioned away).
def get_objects_under_path(bucket, path)
  base = File.join(@root, bucket.name, path)
  Dir.entries(base).inject([]) do |found, entry|
    # Skip "." / ".." and hidden entries (including the metadata dir itself).
    next found if entry =~ /^\./
    child = path.empty? ? entry : File.join(path, entry)
    if File.directory?(File.join(base, entry, SHUCK_METADATA_DIR))
      # Leaf: this directory is a stored object — materialize it.
      found << get_object(bucket.name, child, "")
    else
      # Plain directory: recurse and union the results in.
      found | get_objects_under_path(bucket, child)
    end
  end
end
private :get_objects_under_path

def create_bucket(bucket) def create_bucket(bucket)
FileUtils.mkdir_p(File.join(@root,bucket)) FileUtils.mkdir_p(File.join(@root,bucket))
bucket_obj = Bucket.new(bucket,Time.now,[]) bucket_obj = Bucket.new(bucket,Time.now,[])
Expand All @@ -71,16 +100,16 @@ def delete_bucket(bucket_name)
@bucket_hash.delete(bucket_name) @bucket_hash.delete(bucket_name)
end end


def get_object(bucket,object_name, request) def get_object(bucket_name,object_name, request)
begin begin
real_obj = S3Object.new real_obj = S3Object.new
obj_root = File.join(@root,bucket,object_name,SHUCK_METADATA_DIR) obj_root = File.join(@root,bucket_name,object_name,SHUCK_METADATA_DIR)
metadata = YAML.load(File.open(File.join(obj_root,"metadata"),'rb')) metadata = YAML.load_file(File.join(obj_root,"metadata"))
real_obj.name = object_name real_obj.name = object_name
real_obj.md5 = metadata[:md5] real_obj.md5 = metadata[:md5]
real_obj.content_type = metadata.fetch(:content_type) { "application/octet-stream" } real_obj.content_type = metadata.fetch(:content_type) { "application/octet-stream" }
#real_obj.io = File.open(File.join(obj_root,"content"),'rb') #real_obj.io = File.open(File.join(obj_root,"content"),'rb')
real_obj.io = RateLimitableFile.open(File.join(obj_root,"content"),'rb') real_obj.io = RateLimitableFile.new(File.join(obj_root,"content"))
real_obj.size = metadata.fetch(:size) { 0 } real_obj.size = metadata.fetch(:size) { 0 }
real_obj.creation_date = File.ctime(obj_root).iso8601() real_obj.creation_date = File.ctime(obj_root).iso8601()
real_obj.modified_date = metadata.fetch(:modified_date) { File.mtime(File.join(obj_root,"content")).iso8601() } real_obj.modified_date = metadata.fetch(:modified_date) { File.mtime(File.join(obj_root,"content")).iso8601() }
Expand All @@ -96,45 +125,22 @@ def object_metadata(bucket,object)
end end


def copy_object(src_bucket_name,src_name,dst_bucket_name,dst_name) def copy_object(src_bucket_name,src_name,dst_bucket_name,dst_name)
src_root = File.join(@root,src_bucket_name,src_name,SHUCK_METADATA_DIR) obj = nil
src_metadata_filename = File.join(src_root,"metadata") if src_bucket_name == dst_bucket_name && src_name == dst_name
src_metadata = YAML.load(File.open(src_metadata_filename,'rb').read) # source and destination are the same, nothing to do but
src_content_filename = File.join(src_root,"content") # find current object so it can be returned

obj = src_bucket.find(src_name)
dst_filename= File.join(@root,dst_bucket_name,dst_name) else
FileUtils.mkdir_p(dst_filename) src_root = File.join(@root,src_bucket_name,src_name,SHUCK_METADATA_DIR)

dst_root = File.join(@root,dst_bucket_name,dst_name,SHUCK_METADATA_DIR)
metadata_dir = File.join(dst_filename,SHUCK_METADATA_DIR)
FileUtils.mkdir_p(metadata_dir)

content = File.join(metadata_dir,"content")
metadata = File.join(metadata_dir,"metadata")


File.open(content,'wb') do |f| FileUtils.mkdir_p(dst_root)
File.open(src_content_filename,'rb') do |input| FileUtils.copy_file(File.join(src_root,"content"),File.join(dst_root,"content"))
f << input.read FileUtils.copy_file(File.join(src_root,"metadata"), File.join(dst_root,"metadata"))
end
end


File.open(metadata,'w') do |f| dst_bucket = self.get_bucket(dst_bucket_name)
File.open(src_metadata_filename,'r') do |input| dst_bucket.add(get_object(dst_bucket.name, dst_name, ""))
f << input.read
end
end end

src_bucket = self.get_bucket(src_bucket_name)
dst_bucket = self.get_bucket(dst_bucket_name)

obj = S3Object.new
obj.name = dst_name
obj.md5 = src_metadata[:md5]
obj.content_type = src_metadata[:content_type]
obj.size = src_metadata[:size]
obj.modified_date = src_metadata[:modified_date]

src_obj = src_bucket.find(src_name)
dst_bucket.add(obj)
src_bucket.remove(src_obj)
return obj return obj
end end


Expand Down Expand Up @@ -174,6 +180,7 @@ def store_object(bucket,object_name,request)
obj.md5 = metadata_struct[:md5] obj.md5 = metadata_struct[:md5]
obj.content_type = metadata_struct[:content_type] obj.content_type = metadata_struct[:content_type]
obj.size = metadata_struct[:size] obj.size = metadata_struct[:size]
obj.creation_date = File.ctime(metadata_dir)
obj.modified_date = metadata_struct[:modified_date] obj.modified_date = metadata_struct[:modified_date]


bucket.add(obj) bucket.add(obj)
Expand Down
23 changes: 20 additions & 3 deletions lib/fakes3/rate_limitable_file.rb
Original file line number Original file line Diff line number Diff line change
@@ -1,5 +1,12 @@
module FakeS3 module FakeS3
class RateLimitableFile < File class RateLimitableFile

# path: filesystem path of the backing content file; pos: optional byte
# offset at which reads should start (nil means read from the beginning).
attr_reader :path, :pos
# Record where the object's content lives on disk. No file handle is opened
# here — the file is opened lazily on each read.
#
# path - String path to the content file.
# pos  - optional Integer starting read offset (defaults to nil).
def initialize(path, pos = nil)
@path = path
@pos = pos
end

@@rate_limit = nil @@rate_limit = nil
# Specify a rate limit in bytes per second # Specify a rate limit in bytes per second
def self.rate_limit def self.rate_limit
Expand All @@ -10,12 +17,22 @@ def self.rate_limit=(rate_limit)
@@rate_limit = rate_limit @@rate_limit = rate_limit
end end


def read(args) def read(args = nil)
if @@rate_limit if @@rate_limit
time_to_sleep = args / @@rate_limit time_to_sleep = args / @@rate_limit
sleep(time_to_sleep) sleep(time_to_sleep)
end end
return super(args)
return File.open(@path) do |file|
if !pos.nil?
file.pos = @pos
end
if args.nil?
file.read
else
file.read(args)
end
end
end end
end end
end end
14 changes: 10 additions & 4 deletions lib/fakes3/server.rb
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -58,6 +58,10 @@ def do_GET(request, response)
when 'LS_BUCKET' when 'LS_BUCKET'
bucket_obj = @store.get_bucket(s_req.bucket) bucket_obj = @store.get_bucket(s_req.bucket)
if bucket_obj if bucket_obj
unless bucket_obj.opened
bucket_obj.objects = @store.get_sorted_object_list(bucket_obj)
bucket_obj.opened = true
end
response.status = 200 response.status = 200
response['Content-Type'] = "application/xml" response['Content-Type'] = "application/xml"
query = { query = {
Expand Down Expand Up @@ -117,12 +121,12 @@ def do_GET(request, response)
return return
end end
end end
response['Content-Length'] = File::Stat.new(real_obj.io.path).size response['Content-Length'] = content_length
response['Last-Modified'] = real_obj.modified_date response['Last-Modified'] = real_obj.modified_date
if s_req.http_verb == 'HEAD' if s_req.http_verb == 'HEAD'
response.body = "" response.body = ""
else else
response.body = real_obj.io response.body = real_obj.io.read()
end end
end end
end end
Expand All @@ -132,7 +136,9 @@ def do_PUT(request,response)


case s_req.type case s_req.type
when Request::COPY when Request::COPY
bucket = @store.get_bucket(s_req.src_bucket)
@store.copy_object(s_req.src_bucket,s_req.src_object,s_req.bucket,s_req.object) @store.copy_object(s_req.src_bucket,s_req.src_object,s_req.bucket,s_req.object)
@store.delete_object(bucket,s_req.src_object, "")
when Request::STORE when Request::STORE
bucket_obj = @store.get_bucket(s_req.bucket) bucket_obj = @store.get_bucket(s_req.bucket)
if !bucket_obj if !bucket_obj
Expand Down Expand Up @@ -160,8 +166,8 @@ def do_DELETE(request,response)


case s_req.type case s_req.type
when Request::DELETE_OBJECT when Request::DELETE_OBJECT
bucket_obj = @store.get_bucket(s_req.bucket) bucket = @store.get_bucket(s_req.bucket)
@store.delete_object(bucket_obj,s_req.object,s_req.webrick_request) @store.delete_object(bucket,s_req.object,s_req.webrick_request)
when Request::DELETE_BUCKET when Request::DELETE_BUCKET
@store.delete_bucket(s_req.bucket) @store.delete_bucket(s_req.bucket)
end end
Expand Down
43 changes: 30 additions & 13 deletions test/right_aws_commands_test.rb
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -6,57 +6,74 @@
class RightAWSCommandsTest < Test::Unit::TestCase class RightAWSCommandsTest < Test::Unit::TestCase


def setup def setup
@s3 = RightAws::S3Interface.new('1E3GDYEOGFJPIT7XXXXXX','hgTHt68JY07JKUY08ftHYtERkjgtfERn57XXXXXX', @s3 = RightAws::S3.new('1E3GDYEOGFJPIT7XXXXXX','hgTHt68JY07JKUY08ftHYtERkjgtfERn57XXXXXX',
{:multi_thread => false, :server => 'localhost', {:multi_thread => false, :server => 'localhost',
:port => 10453, :protocol => 'http',:logger => Logger.new("/dev/null"),:no_subdomains => true }) :port => 10453, :protocol => 'http',:logger => Logger.new("/dev/null"),:no_subdomains => true })
end end


def teardown def teardown
@s3.interface.delete("s3media", "last_modified")
@s3.interface.delete("s3media", "exists")
end end


def test_create_bucket def test_create_bucket
bucket = @s3.create_bucket("s3media") bucket = @s3.interface.create_bucket("s3media")
assert_not_nil bucket assert_not_nil bucket
end end


def test_store def test_store
@s3.put("s3media","helloworld","Hello World Man!") @s3.interface.put("s3media","helloworld","Hello World Man!")
obj = @s3.get("s3media","helloworld") obj = @s3.interface.get("s3media","helloworld")
assert_equal "Hello World Man!",obj[:object] assert_equal "Hello World Man!",obj[:object]


obj = @s3.get("s3media","helloworld") obj = @s3.interface.get("s3media","helloworld")
end

def test_header_last_modified
@s3.interface.put("s3media","last_modified","foo")
obj = @s3.interface.get("s3media","last_modified")
assert_not_nil obj[:headers]["last-modified"]
end

def test_exists
key = @s3.bucket("s3media").key("exists")
assert !key.exists?

key.data = 'foo'
key.put
assert key.exists?
end end


def test_large_store def test_large_store
@s3.put("s3media","helloworld","Hello World Man!") @s3.interface.put("s3media","helloworld","Hello World Man!")
buffer = "" buffer = ""
500000.times do 500000.times do
buffer << "#{(rand * 100).to_i}" buffer << "#{(rand * 100).to_i}"
end end


buf_len = buffer.length buf_len = buffer.length
@s3.put("s3media","big",buffer) @s3.interface.put("s3media","big",buffer)


output = "" output = ""
@s3.get("s3media","big") do |chunk| @s3.interface.get("s3media","big") do |chunk|
output << chunk output << chunk
end end
assert_equal buf_len,output.size assert_equal buf_len,output.size
end end


def test_multi_directory def test_multi_directory
@s3.put("s3media","dir/right/123.txt","recursive") @s3.interface.put("s3media","dir/right/123.txt","recursive")
output = "" output = ""
obj = @s3.get("s3media","dir/right/123.txt") do |chunk| obj = @s3.interface.get("s3media","dir/right/123.txt") do |chunk|
output << chunk output << chunk
end end
assert_equal "recursive", output assert_equal "recursive", output
end end


def test_intra_bucket_copy def test_intra_bucket_copy
@s3.put("s3media","original.txt","Hello World") @s3.interface.put("s3media","original.txt","Hello World")
@s3.copy("s3media","original.txt","s3media","copy.txt") @s3.interface.copy("s3media","original.txt","s3media","copy.txt")
obj = @s3.get("s3media","copy.txt") obj = @s3.interface.get("s3media","copy.txt")
assert_equal "Hello World",obj[:object] assert_equal "Hello World",obj[:object]
end end


Expand Down