This repository has been archived by the owner on Nov 2, 2020. It is now read-only.

Commit

Switch to aws-sdk v2
eagletmt committed Nov 2, 2014
1 parent 0c6bdae commit 0aab978
Showing 5 changed files with 22 additions and 22 deletions.
2 changes: 1 addition & 1 deletion akabei.gemspec
@@ -22,7 +22,7 @@ Gem::Specification.new do |spec|
   spec.add_dependency "libarchive"
   spec.add_dependency "safe_yaml"
   spec.add_dependency "thor"
-  spec.add_development_dependency "aws-sdk"
+  spec.add_development_dependency "aws-sdk-resources"
   spec.add_development_dependency "bundler"
   spec.add_development_dependency "coveralls"
   spec.add_development_dependency "rake"
4 changes: 2 additions & 2 deletions lib/akabei/omakase/cli.rb
@@ -44,9 +44,9 @@ def init(name)
 
       if options[:s3]
         begin
-          require 'aws-sdk'
+          require 'aws-sdk-resources'
        rescue LoadError => e
-          say("WARNING: You don't have aws-sdk installed. Disable S3 repository.", :yellow)
+          say("WARNING: You don't have aws-sdk-resources installed. Disable S3 repository.", :yellow)
          options[:s3] = false
        end
      end
17 changes: 11 additions & 6 deletions lib/akabei/omakase/s3.rb
@@ -3,12 +3,12 @@ module Omakase
   class S3
     def initialize(aws_config, shell)
       if aws_config
-        require 'aws-sdk'
-        @bucket = AWS::S3.new(
+        require 'aws-sdk-resources'
+        @bucket = Aws::S3::Resource.new(
           access_key_id: aws_config['access_key_id'],
           secret_access_key: aws_config['secret_access_key'],
           region: aws_config['region'],
-        ).buckets[aws_config['bucket']]
+        ).bucket(aws_config['bucket'])
         @write_options = aws_config['write_options']
         @shell = shell
       end
@@ -34,11 +34,11 @@ def download_repository(config, arch)
     def get(path)
       @shell.say("Download #{path}", :blue)
       path.open('wb') do |f|
-        @bucket.objects[path.to_s].read do |chunk|
+        @bucket.object(path.to_s).get do |chunk|
           f.write(chunk)
         end
       end
-    rescue AWS::S3::Errors::NoSuchKey
+    rescue Aws::S3::Errors::NoSuchKey
       @shell.say("S3: #{path} not found", :red)
       FileUtils.rm_f(path)
     end
@@ -64,7 +64,12 @@ def upload_repository(config, arch, packages)
 
     def put(path, mime_type)
       @shell.say("Upload #{path}", :green)
-      @bucket.objects[path.to_s].write(path, @write_options.merge(content_type: mime_type))
+      path.open do |f|
+        @bucket.object(path.to_s).put(@write_options.merge(
+          body: f,
+          content_type: mime_type,
+        ))
+      end
     end
   end
 end
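
For reference, this is the aws-sdk v1 to v2 mapping that the s3.rb changes above rely on. A minimal sketch, using hypothetical bucket and key names rather than anything from the repository:

  require 'aws-sdk-resources'  # v2; v1 used `require 'aws-sdk'`

  # v1: AWS::S3.new(...).buckets['my-bucket']  /  v2: Aws::S3::Resource.new(...).bucket('my-bucket')
  s3  = Aws::S3::Resource.new(region: 'ap-northeast-1')
  obj = s3.bucket('my-bucket').object('repo/os/x86_64/test.db')  # v1: bucket.objects['repo/os/x86_64/test.db']

  # Download: v1 streamed via Object#read { |chunk| ... }; in v2, Object#get
  # forwards the block to Client#get_object, which yields the body in chunks.
  File.open('test.db', 'wb') do |f|
    obj.get { |chunk| f.write(chunk) }
  end

  # Upload: v1's Object#write(path, options) becomes Object#put(body: io, **options) in v2.
  File.open('test.db', 'rb') do |f|
    obj.put(body: f, content_type: 'application/octet-stream')
  end
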
2 changes: 1 addition & 1 deletion lib/akabei/omakase/templates/.akabei.yml.tt
@@ -19,6 +19,6 @@ s3:
   region:
   write_options:
     # :acl: :public_read
-    # :reduced_redundancy: false
+    # :storage_class: REDUCED_REDUNDANCY
     # :server_side_encryption: :aes256
 <%- end -%>
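
One caveat on write_options: in v2 the hash is merged straight into Aws::S3::Object#put (see s3.rb above), so its keys must be valid put_object parameters, and v2 generally expects string values where v1 accepted symbols. An illustrative example of v2-style values, not taken from the template:

  write_options = {
    acl: 'public-read',                   # v1 accepted :public_read
    storage_class: 'REDUCED_REDUNDANCY',  # replaces v1's reduced_redundancy: true
    server_side_encryption: 'AES256',     # v1 accepted :aes256
  }
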
19 changes: 7 additions & 12 deletions spec/akabei/omakase/cli_spec.rb
@@ -94,10 +94,8 @@ def initialize(stdout)
 let(:bucket_name) { 'test.bucket.name' }
 let(:region) { 'ap-northeast-1' }
 
-let(:buckets) { double('S3::BucketCollection') }
 let(:bucket) { double('S3::Bucket') }
-let(:objects) { double('S3::ObjectCollection') }
-let(:write_options) { { reduced_redundancy: true } }
+let(:write_options) { { storage_class: 'REDUCED_REDUNDANCY' } }
 
 before do
   c = SafeYAML.load_file('.akabei.yml')
@@ -107,31 +105,28 @@ def initialize(stdout)
   c['s3']['region'] = region
   c['s3']['write_options'] = write_options
   open('.akabei.yml', 'w') { |f| YAML.dump(c, f) }
-
-  allow_any_instance_of(AWS::S3).to receive(:buckets).and_return(buckets)
 end
 
 it 'uploads built packages and update repositories' do
   %w[i686 x86_64].each do |arch|
     setup_command_expectations(arch, config.package_dir('nkf'))
   end
-  expect(buckets).to receive(:[]).with(bucket_name).and_return(bucket)
-  allow(bucket).to receive(:objects).and_return(objects)
+  allow_any_instance_of(Aws::S3::Resource).to receive(:bucket).with(bucket_name).and_return(bucket)
 
   %w[i686 x86_64].each do |arch|
     %w[test.db test.files test.abs.tar.gz].each do |fname|
       obj = double("S3::Object #{fname}")
       # download and upload
-      expect(objects).to receive(:[]).with("test/os/#{arch}/#{fname}").twice.and_return(obj)
-      expect(obj).to receive(:read).and_yield('')
-      expect(obj).to receive(:write)
+      expect(bucket).to receive(:object).with("test/os/#{arch}/#{fname}").twice.and_return(obj)
+      expect(obj).to receive(:get).and_yield('')
+      expect(obj).to receive(:put)
     end
 
     # upload only
     pkg = double("S3::Object built package (#{arch})")
     db_name = "nkf-2.1.3-1-#{arch}.pkg.tar.xz"
-    expect(objects).to receive(:[]).with("test/os/#{arch}/#{db_name}").and_return(pkg)
-    expect(pkg).to receive(:write).with(anything, hash_including(write_options))
+    expect(bucket).to receive(:object).with("test/os/#{arch}/#{db_name}").and_return(pkg)
+    expect(pkg).to receive(:put).with(hash_including(write_options))
   end
 
   cli.invoke(:build, ['nkf'])
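
The spec no longer needs doubles for the v1 bucket and object collections; it stubs the v2 resource interface directly. A condensed sketch of that stubbing pattern, with illustrative names only:

  bucket = double('S3::Bucket')
  obj    = double('S3::Object')

  # The code under test builds its own Aws::S3::Resource, so intercept #bucket there.
  allow_any_instance_of(Aws::S3::Resource)
    .to receive(:bucket).with('test.bucket.name').and_return(bucket)

  # v2 addresses keys via Bucket#object(key) instead of v1's Bucket#objects[key].
  allow(bucket).to receive(:object).with('test/os/x86_64/test.db').and_return(obj)
  allow(obj).to receive(:get).and_yield('')  # download path
  allow(obj).to receive(:put)                # upload path
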
