Skip to content

Commit

Permalink
:s3_credentials will now accept a Pathname
Browse files Browse the repository at this point in the history
Rails.root returns a Pathname and you can now declare your attachment
without having to call #to_s first:

has_attached_file :avatar,
                  :storage => :s3,
                  :s3_credentials => Rails.root.join('s3.yml')
(cherry picked from commit 65d7600)
  • Loading branch information
hardbap authored and Jon Yurek committed Apr 29, 2010
1 parent 9283b5e commit 95fc7b6
Show file tree
Hide file tree
Showing 2 changed files with 46 additions and 24 deletions.
32 changes: 16 additions & 16 deletions lib/paperclip/storage.rb
Expand Up @@ -8,18 +8,18 @@ module Storage
# * +path+: The location of the repository of attachments on disk. This can (and, in
# almost all cases, should) be coordinated with the value of the +url+ option to
# allow files to be saved into a place where Apache can serve them without
# hitting your app. Defaults to
# hitting your app. Defaults to
# ":rails_root/public/:attachment/:id/:style/:basename.:extension"
# By default this places the files in the app's public directory which can be served
# directly. If you are using capistrano for deployment, a good idea would be to
# make a symlink to the capistrano-created system directory from inside your app's
# By default this places the files in the app's public directory which can be served
# directly. If you are using capistrano for deployment, a good idea would be to
# make a symlink to the capistrano-created system directory from inside your app's
# public directory.
# See Paperclip::Attachment#interpolate for more information on variable interpolation.
# :path => "/var/app/attachments/:class/:id/:style/:basename.:extension"
module Filesystem
# Hook invoked when this storage module is mixed into an attachment
# instance via `extend`. Filesystem storage needs no per-attachment
# setup, so this is intentionally a no-op (unlike the S3 module's hook).
def self.extended base
end

def exists?(style_name = default_style)
if original_filename
File.exist?(path(style_name))
Expand Down Expand Up @@ -78,25 +78,25 @@ def flush_deletes #:nodoc:
# database.yml file, so different environments can use different accounts:
# development:
# access_key_id: 123...
# secret_access_key: 123...
# secret_access_key: 123...
# test:
# access_key_id: abc...
# secret_access_key: abc...
# secret_access_key: abc...
# production:
# access_key_id: 456...
# secret_access_key: 456...
# secret_access_key: 456...
# This is not required, however, and the file may simply look like this:
# access_key_id: 456...
# secret_access_key: 456...
# secret_access_key: 456...
# In which case, those access keys will be used in all environments. You can also
# put your bucket name in this file, instead of adding it to the code directly.
# This is useful when you want the same account but a different bucket for
# This is useful when you want the same account but a different bucket for
# development versus production.
# * +s3_permissions+: This is a String that should be one of the "canned" access
# policies that S3 provides (more information can be found here:
# http://docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTAccessPolicy.html#RESTCannedAccessPolicies)
# The default for Paperclip is :public_read.
# * +s3_protocol+: The protocol for the URLs generated to your S3 assets. Can be either
# * +s3_protocol+: The protocol for the URLs generated to your S3 assets. Can be either
# 'http' or 'https'. Defaults to 'http' when your :s3_permissions are :public_read (the
# default), and 'https' when your :s3_permissions are anything else.
# * +s3_headers+: A hash of headers such as {'Expires' => 1.year.from_now.httpdate}
Expand All @@ -111,7 +111,7 @@ def flush_deletes #:nodoc:
# * +url+: There are three options for the S3 url. You can choose to have the bucket's name
# placed domain-style (bucket.s3.amazonaws.com) or path-style (s3.amazonaws.com/bucket).
# Lastly, you can specify a CNAME (which requires the CNAME to be specified as
# :s3_alias_url. You can read more about CNAMEs and S3 at
# :s3_alias_url. You can read more about CNAMEs and S3 at
# http://docs.amazonwebservices.com/AmazonS3/latest/index.html?VirtualHosting.html
# Normally, this won't matter in the slightest and you can leave the default (which is
# path-style, or :s3_path_url). But in some cases paths don't work and you need to use
Expand Down Expand Up @@ -159,7 +159,7 @@ def self.extended base
"#{attachment.s3_protocol}://#{attachment.bucket_name}.s3.amazonaws.com/#{attachment.path(style).gsub(%r{^/}, "")}"
end
end

# Returns a signed, time-limited URL for the attachment on S3.
#
# time - validity window in seconds (defaults to 3600, i.e. one hour).
#
# Delegates to AWS::S3::S3Object.url_for using the attachment's default
# path and bucket. NOTE(review): uses `path` with no style argument, so
# this presumably signs the default style only — confirm against
# Attachment#path semantics.
def expiring_url(time = 3600)
AWS::S3::S3Object.url_for(path, bucket_name, :expires_in => time )
end
Expand All @@ -176,7 +176,7 @@ def parse_credentials creds
creds = find_credentials(creds).stringify_keys
(creds[Rails.env] || creds).symbolize_keys
end

def exists?(style = default_style)
if original_filename
AWS::S3::S3Object.exists?(path(style), bucket_name)
Expand Down Expand Up @@ -227,12 +227,12 @@ def flush_deletes #:nodoc:
end
@queued_for_delete = []
end

def find_credentials creds
case creds
when File
YAML::load(ERB.new(File.read(creds.path)).result)
when String
when String, Pathname
YAML::load(ERB.new(File.read(creds)).result)
when Hash
creds
Expand Down
38 changes: 30 additions & 8 deletions test/storage_test.rb
Expand Up @@ -102,17 +102,17 @@ def rails_env(env)
:s3_host_alias => "something.something.com",
:path => ":attachment/:basename.:extension",
:url => ":s3_alias_url"

rails_env("production")

@dummy = Dummy.new
@dummy.avatar = StringIO.new(".")

AWS::S3::S3Object.expects(:url_for).with("avatars/stringio.txt", "prod_bucket", { :expires_in => 3600 })

@dummy.avatar.expiring_url
end

should "should succeed" do
assert true
end
Expand Down Expand Up @@ -184,7 +184,7 @@ def rails_env(env)
assert true
end
end

context "and remove" do
setup do
AWS::S3::S3Object.stubs(:exists?).returns(true)
Expand All @@ -198,15 +198,15 @@ def rails_env(env)
end
end
end

context "An attachment with S3 storage and bucket defined as a Proc" do
setup do
AWS::S3::Base.stubs(:establish_connection!)
rebuild_model :storage => :s3,
:bucket => lambda { |attachment| "bucket_#{attachment.instance.other}" },
:s3_credentials => {:not => :important}
end

should "get the right bucket name" do
assert "bucket_a", Dummy.new(:other => 'a').avatar.bucket_name
assert "bucket_b", Dummy.new(:other => 'b').avatar.bucket_name
Expand Down Expand Up @@ -254,6 +254,28 @@ def rails_env(env)
end
end

# Regression test for this commit's feature: :s3_credentials may be a
# Pathname (e.g. Rails.root.join('s3.yml')), not only a String, File,
# or Hash.
context "with S3 credentials supplied as Pathname" do
setup do
# The fixture YAML is ERB-processed, so these ENV vars should surface
# in the parsed credentials — assumes fixtures/s3.yml interpolates
# S3_KEY / S3_BUCKET / S3_SECRET; verify against the fixture file.
ENV['S3_KEY'] = 'pathname_key'
ENV['S3_BUCKET'] = 'pathname_bucket'
ENV['S3_SECRET'] = 'pathname_secret'

rails_env('test')

# Pass a Pathname (not a String) to exercise the new
# `when String, Pathname` branch in find_credentials.
rebuild_model :storage => :s3,
:s3_credentials => Pathname.new(File.join(File.dirname(__FILE__))).join("fixtures/s3.yml")

Dummy.delete_all
@dummy = Dummy.new
end

should "parse the credentials" do
assert_equal 'pathname_bucket', @dummy.avatar.bucket_name
assert_equal 'pathname_key', AWS::S3::Base.connection.options[:access_key_id]
assert_equal 'pathname_secret', AWS::S3::Base.connection.options[:secret_access_key]
end
end

context "with S3 credentials in a YAML file" do
setup do
ENV['S3_KEY'] = 'env_key'
Expand Down

0 comments on commit 95fc7b6

Please sign in to comment.