
update aws gem, stub in rake tasks for s3

1 parent b57b541 commit f0a8598ba204a82a8f853caae03c770522aaf00b @r888888888 committed Feb 8, 2017
Showing with 101 additions and 28 deletions.
  1. +8 −7 Gemfile.lock
  2. +4 −4 app/models/post.rb
  3. +89 −17 lib/tasks/images.rake
Gemfile.lock
@@ -71,12 +71,14 @@ GEM
public_suffix (~> 2.0, >= 2.0.2)
arel (6.0.3)
awesome_print (1.6.1)
- aws-sdk (2.1.14)
- aws-sdk-resources (= 2.1.14)
- aws-sdk-core (2.1.14)
+ aws-sdk (2.7.4)
+ aws-sdk-resources (= 2.7.4)
+ aws-sdk-core (2.7.4)
+ aws-sigv4 (~> 1.0)
jmespath (~> 1.0)
- aws-sdk-resources (2.1.14)
- aws-sdk-core (= 2.1.14)
+ aws-sdk-resources (2.7.4)
+ aws-sdk-core (= 2.7.4)
+ aws-sigv4 (1.0.0)
bcrypt (3.1.10)
bcrypt (3.1.10-x64-mingw32)
bcrypt-ruby (3.1.5)
@@ -194,8 +196,7 @@ GEM
rake
rdoc
semver2
- jmespath (1.0.2)
- multi_json (~> 1.0)
+ jmespath (1.3.1)
json (1.8.6)
jwt (1.5.6)
kgio (2.10.0)
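
The Gemfile itself is not part of this diff, so the constraint that admits 2.7.4 is not visible here. As a hypothetical sketch, a pessimistic pin like the one below, followed by bundle update aws-sdk, would produce the lockfile entries above, including the new aws-sigv4 dependency:

# Gemfile (hypothetical entry; the real constraint is not shown in this diff)
gem "aws-sdk", "~> 2.7"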
app/models/post.rb
@@ -105,11 +105,11 @@ def file_name
end
def file_url
- if cdn_hosted?
- Danbooru.config.danbooru_s3_base_url + "/#{file_path_prefix}#{md5}.#{file_ext}"
- else
+ # if cdn_hosted?
+ # Danbooru.config.danbooru_s3_base_url + "/#{file_path_prefix}#{md5}.#{file_ext}"
+ # else
"/data/#{seo_tag_string}#{file_path_prefix}#{md5}.#{file_ext}"
- end
+ # end
end
def large_file_url
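
With the cdn_hosted? branch commented out, file_url now always returns a local /data/ path regardless of the post's cdn_hosted flag. A small illustration with invented values (the md5 and extension are placeholders, not from the repo):

# Assuming seo_tag_string and file_path_prefix are empty,
# md5 = "d34e4cf0a437a5d65f8e82b7bcd02606" and file_ext = "jpg":
post.file_url
# => "/data/d34e4cf0a437a5d65f8e82b7bcd02606.jpg"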
lib/tasks/images.rake
@@ -1,23 +1,95 @@
require 'danbooru_image_resizer/danbooru_image_resizer'
namespace :images do
- desc "Enable CDN"
- task :enable_cdn, [:min_id, :max_id] => :environment do |t, args|
- CurrentUser.scoped(User.admins.first, "127.0.0.1") do
- credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
- Aws.config.update({
- region: "us-east-1",
- credentials: credentials
- })
- client = Aws::S3::Client.new
- bucket = Danbooru.config.aws_s3_bucket_name
-
- Post.where("id >= ? and id <= ?", args[:min_id], args[:max_id]).find_each do |post|
- post.cdn_hosted = true
- post.save
- key = File.basename(post.file_path)
- client.copy_object(bucket: bucket, key: key, acl: "public-read", storage_class: "STANDARD", copy_source: "/#{bucket}/#{key}", metadata_directive: "COPY")
- # client.put_object(bucket: bucket, key: key, body: body, content_md5: base64_md5, acl: "public-read", storage_class: "STANDARD")
+ desc "Upload large images to S3"
+ task :upload_large_to_s3, [:min_id, :max_id] => :environment do |t, args|
+ min_id = args[:min_id]
+ max_id = args[:max_id]
+
+ credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
+ Aws.config.update({
+ region: "us-west-2",
+ credentials: credentials
+ })
+ client = Aws::S3::Client.new
+ bucket = "danbooru-large"
+
+ Post.where("id >= ? and id <= ? and image_width > ?", min_id, max_id, Danbooru.config.large_image_width).find_each do |post|
+ if File.exist?(post.large_file_path)
+ key = File.basename(post.large_file_path)
+ body = File.open(post.large_file_path, "rb")
+ # Base64-encoded MD5 of the file lets S3 verify the upload server-side
+ base64_md5 = Digest::MD5.file(post.large_file_path).base64digest
+ client.put_object(bucket: bucket, key: key, acl: "authenticated-read", body: body, content_md5: base64_md5)
+ end
+ end
+ end
+
+ desc "Upload previews to S3"
+ task :upload_preview_to_s3, [:min_id, :max_id] => :environment do |t, args|
+ min_id = args[:min_id]
+ max_id = args[:max_id]
+
+ credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
+ Aws.config.update({
+ region: "us-west-2",
+ credentials: credentials
+ })
+ client = Aws::S3::Client.new
+ bucket = "danbooru-preview"
+
+ Post.where("id >= ? and id <= ?", min_id, max_id).find_each do |post|
+ if File.exist?(post.preview_file_path)
+ key = File.basename(post.preview_file_path)
+ body = File.open(post.preview_file_path, "rb")
+ # Base64-encoded MD5 of the file lets S3 verify the upload server-side
+ base64_md5 = Digest::MD5.file(post.preview_file_path).base64digest
+ client.put_object(bucket: bucket, key: key, acl: "authenticated-read", body: body, content_md5: base64_md5)
+ end
+ end
+ end
+
+ desc "Reset S3 + Storage Class"
+ task :reset_s3, [:min_id, :max_id] => :environment do |t, args|
+ min_id = args[:min_id]
+ max_id = args[:max_id]
+
+ credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
+ Aws.config.update({
+ region: "us-east-1",
+ credentials: credentials
+ })
+ client = Aws::S3::Client.new
+ bucket = Danbooru.config.aws_s3_bucket_name
+
+ Post.where("id >= ? and id <= ?", min_id, max_id).find_each do |post|
+ key = File.basename(post.file_path)
+ client.copy_object(bucket: bucket, key: key, acl: "authenticated-read", storage_class: "STANDARD", copy_source: "/#{bucket}/#{key}", metadata_directive: "COPY")
+ end
+ end
+
+ desc "Restore original files from Glacier"
+ task :restore_glacier, [:min_id, :max_id] => :environment do |t, args|
+ min_id = args[:min_id] # 10_001
+ max_id = args[:max_id] # 50_000
+
+ credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
+ Aws.config.update({
+ region: "us-east-1",
+ credentials: credentials
+ })
+ client = Aws::S3::Client.new
+ bucket = Danbooru.config.aws_s3_bucket_name
+
+ Post.where("id >= ? and id <= ?", min_id, max_id).find_each do |post|
+ key = File.basename(post.file_path)
+ begin
+ client.restore_object(
+ bucket: bucket,
+ key: key,
+ restore_request: {
+ days: 1,
+ glacier_job_parameters: {
+ tier: "Bulk"
+ }
+ }
+ )
+ rescue Aws::S3::Errors::InvalidObjectState, Aws::S3::Errors::NoSuchKey, Aws::S3::Errors::RestoreAlreadyInProgress
end
end
end
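
All four tasks take an inclusive [min_id, max_id] range as task arguments. Hypothetical invocations (the ID ranges are placeholders, apart from the 10_001..50_000 hint in the restore_glacier comments):

# upload resized "large" versions and previews for posts 1..10000
bundle exec rake "images:upload_large_to_s3[1,10000]"
bundle exec rake "images:upload_preview_to_s3[1,10000]"

# flip originals back to the STANDARD storage class
bundle exec rake "images:reset_s3[1,10000]"

# queue bulk Glacier restores for the range noted in the task comments
bundle exec rake "images:restore_glacier[10001,50000]"

Glacier restores are asynchronous: restore_object only queues the job (the Bulk tier typically completes within hours), and the bare rescue in restore_glacier deliberately skips objects that are missing, not archived, or already being restored.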
