Skip to content
This repository has been archived by the owner on Mar 21, 2018. It is now read-only.

Commit

Permalink
initial commit
Browse files Browse the repository at this point in the history
  • Loading branch information
orta committed Mar 2, 2013
0 parents commit 93e9896
Show file tree
Hide file tree
Showing 8 changed files with 310 additions and 0 deletions.
3 changes: 3 additions & 0 deletions .gitignore
@@ -0,0 +1,3 @@
docset
download
download_cache
7 changes: 7 additions & 0 deletions Gemfile
@@ -0,0 +1,7 @@
# Dependencies for the docset-generation prototype (app.rb / s3_upload.rb).
source 'https://rubygems.org'

# Guard re-runs app.rb on change (see Guardfile); rb-fsevent provides
# efficient file-system events on OS X.
gem "guard-ruby"
gem 'rb-fsevent'

# Fetches pod sources from their podspec :source declarations.
gem "cocoapods-downloader"
# NOTE(review): this Gemfile appears out of sync with Gemfile.lock — the lock's
# DEPENDENCIES section expects s3-deploy (~> 0.0.1) and does not list aws-sdk.
# Also, app.rb requires 'aws/s3' (the old aws-s3 gem API), not aws-sdk's API.
# TODO: confirm the intended gem set and regenerate the lock with `bundle`.
gem "aws-sdk"
48 changes: 48 additions & 0 deletions Gemfile.lock
@@ -0,0 +1,48 @@
GEM
remote: https://rubygems.org/
specs:
aws-sdk (1.6.9)
httparty (~> 0.7)
json (~> 1.4)
nokogiri (>= 1.4.4)
uuidtools (~> 2.1)
cocoapods-downloader (0.1.0)
coderay (1.0.9)
guard (1.6.2)
listen (>= 0.6.0)
lumberjack (>= 1.0.2)
pry (>= 0.9.10)
terminal-table (>= 1.4.3)
thor (>= 0.14.6)
guard-ruby (0.0.1)
guard
httparty (0.10.2)
multi_json (~> 1.0)
multi_xml (>= 0.5.2)
json (1.7.7)
listen (0.7.3)
lumberjack (1.0.2)
method_source (0.8.1)
multi_json (1.6.1)
multi_xml (0.5.3)
nokogiri (1.5.6)
pry (0.9.12)
coderay (~> 1.0.5)
method_source (~> 0.8)
slop (~> 3.4)
rb-fsevent (0.9.3)
s3-deploy (0.0.1)
aws-sdk (= 1.6.9)
slop (3.4.3)
terminal-table (1.4.5)
thor (0.17.0)
uuidtools (2.1.3)

PLATFORMS
ruby

DEPENDENCIES
cocoapods-downloader
guard-ruby
rb-fsevent
s3-deploy (~> 0.0.1)
4 changes: 4 additions & 0 deletions Guardfile
@@ -0,0 +1,4 @@
# Re-run the main script whenever it changes (via guard-ruby).
guard 'ruby' do
  # BUG FIX: the dot was unescaped (/app.rb/), so the pattern also matched
  # names like "app0rb"; escape it to match "app.rb" literally.
  watch(/app\.rb/)
end
63 changes: 63 additions & 0 deletions app.rb
@@ -0,0 +1,63 @@
require 'cocoapods-downloader'
require 'ostruct'
require 'yaml'
require 'aws/s3'

# Print a blank line so script output is visually separated under Guard.
puts ""

# Directory containing this script; every path below is resolved against it.
current_dir = File.dirname(File.expand_path(__FILE__))
# Example podspec used as the input for this prototype run.
podfile_file_path = current_dir + "/example/AFNetworking.podspec"

module Pod
  # Lightweight stand-in for CocoaPods' Pod::Spec. A podspec file calls
  # Pod::Spec.new do |s| ... end; every attribute it assigns is absorbed by
  # the underlying OpenStruct, with dedicated sub-structs for the
  # platform-scoped settings (s.ios.*, s.osx.*).
  class Spec < OpenStruct
    def initialize(&definition)
      super
      # Platform-specific settings get their own open structs so that
      # s.ios.deployment_target etc. work without extra declarations.
      %w[ios osx].each { |platform| send("#{platform}=", OpenStruct.new) }
      definition.call(self)
    end
  end
end

# Shell out to appledoc to generate HTML documentation for +spec+, reading
# Objective-C sources from +location+ (a ".../download/..." path) and writing
# output into the parallel ".../docset/..." directory.
# Returns system()'s result: true on success, false on a non-zero exit,
# nil if appledoc could not be launched at all.
def create_docset_for_spec spec, location
  output_dir = location.clone.sub "download", "docset"
  arguments = [
    %Q[appledoc --create-html --keep-intermediate-files],
    "--project-name #{spec.name}",
    "--project-company test",
    "--no-install-docset",
    "--company-id com.#{spec.name.downcase}.#{spec.version.downcase}",
    "--output #{output_dir}",
    location
  ]
  system arguments.join(' ')
end

# Invoke the bundled s3_upload.rb script to push the generated "docset"
# directory to the cocoadocs.org S3 bucket. Credentials are read from the
# S3_KEY / S3_SECRET environment variables.
# Returns system()'s result (true/false/nil).
#
# NOTE(review): +location+ is currently unused — the script always uploads
# the top-level "docset" directory. TODO: confirm whether it should upload
# +location+ specifically.
def upload_docset_to_s3 location
  upload_command = []
  upload_command << "ruby s3_upload.rb"
  # ENV.fetch raises a clear KeyError when a credential is missing, instead
  # of the obscure TypeError that `"--key " + nil` would produce.
  upload_command << "--key " + ENV.fetch("S3_KEY")
  upload_command << "--secret " + ENV.fetch("S3_SECRET")
  upload_command << "--bucket cocoadocs.org"
  upload_command << "--public-read --recursive"
  upload_command << "docset"
  # BUG FIX: this previously joined the undefined local `docset_command`,
  # raising NameError before any upload could happen.
  system upload_command.join(' ')
end


# --- Prototype pipeline: load spec -> download source -> build docs -> upload.

# Podspecs are plain Ruby that call Pod::Spec.new, so evaluating the file
# yields the Spec instance. File.read closes the handle (the previous
# File.open(...).read leaked the file descriptor).
# NOTE(review): eval is only safe because the podspec is repo-controlled input.
spec = eval( File.read(podfile_file_path) )
download_location = current_dir + "/download/#{spec.name}/#{spec.version}/"
docset_location = current_dir + "/docset/#{spec.name}/#{spec.version}/"

# Skip the download when this pod/version was fetched on a previous run.
unless File.exist? download_location
  downloader = Pod::Downloader.for_target(download_location, spec.source)
  # BUG FIX: was `current_dir + "download_cache"`, which concatenated without
  # a separator and put the cache *next to* the project directory
  # (".../projdownload_cache" instead of ".../proj/download_cache").
  downloader.cache_root = current_dir + "/download_cache"
  downloader.download
end

# system() returns true only when appledoc exited successfully, so a failed
# doc build never triggers an upload.
created_docset = create_docset_for_spec spec, download_location
if created_docset
  upload_docset_to_s3 docset_location
end


puts "done"
1 change: 1 addition & 0 deletions example/ABGetMe.podspec
@@ -0,0 +1 @@
AFNetworking.podspec
30 changes: 30 additions & 0 deletions example/AFNetworking.podspec
@@ -0,0 +1,30 @@
# Example fixture: a copy of the real AFNetworking 1.1.0 podspec, used by
# app.rb as its sample input (eval'd to produce a Pod::Spec instance).
Pod::Spec.new do |s|
s.name = 'AFNetworking'
s.version = '1.1.0'
s.license = 'MIT'
s.summary = 'A delightful iOS and OS X networking framework.'
s.homepage = 'https://github.com/AFNetworking/AFNetworking'
s.authors = { 'Mattt Thompson' => 'm@mattt.me', 'Scott Raymond' => 'sco@gowalla.com' }
s.source = { :git => 'https://github.com/AFNetworking/AFNetworking.git', :tag => '1.1.0' }
s.source_files = 'AFNetworking'
s.requires_arc = true

s.ios.deployment_target = '5.0'
s.ios.frameworks = 'MobileCoreServices', 'SystemConfiguration'

s.osx.deployment_target = '10.7'
s.osx.frameworks = 'CoreServices', 'SystemConfiguration'

s.prefix_header_contents = <<-EOS
#ifdef __OBJC__
#import <Availability.h>
#if __IPHONE_OS_VERSION_MIN_REQUIRED
#import <SystemConfiguration/SystemConfiguration.h>
#import <MobileCoreServices/MobileCoreServices.h>
#else
#import <SystemConfiguration/SystemConfiguration.h>
#import <CoreServices/CoreServices.h>
#endif
#endif /* __OBJC__*/
EOS
end
154 changes: 154 additions & 0 deletions s3_upload.rb
@@ -0,0 +1,154 @@
#!/usr/bin/env ruby
# BUG FIX: the shebang was `#!/bin/env ruby`; env lives in /usr/bin on
# virtually all systems, so the script could not be executed directly.

# This file is released under the MIT license.
# Copyright (c) Famundo LLC, 2007. http://www.famundo.com
# Author: Gal Naor - http://devblog.famundo.com

require 'optparse'

# Parse the command-line options into the @-prefixed variables used by the
# rest of the script, then validate that the required credentials, bucket
# and file list are present — printing usage and exiting otherwise.
@buckets = []
@compress = []
@verbose = 0
opts = OptionParser.new do |opts|
  # BUG FIX: the usage line referred to "cp2s3.rb" (the script this file was
  # adapted from); this file is saved as s3_upload.rb.
  opts.banner = "Usage: s3_upload.rb [options] FILE_SPEC"
  opts.separator "Copy files and directories from the local machine into Amazon's S3. Keep the directory structure intact."
  opts.separator "Empty directories will be skipped."
  opts.separator ""
  opts.separator "FILE_SPEC List of files/directories. Accepts wildcards."
  opts.separator "          If given the -g option, interpret FILE_SPEC as a Ruby Dir::Glob style regular expressions."
  opts.separator "          With -g option, '' needed around the pattern to protect it from shell parsing."
  opts.separator ""
  opts.separator "Required:"
  opts.on("-k", "--key ACCESS_KEY" , "Your S3 access key. You can also set the environment variable AWS_ACCESS_KEY_ID instead") { |o| @access_key = o }
  opts.on("-s", "--secret SECRET_KEY" , "Your S3 secret key. You can also set the environment variable AWS_SECRET_ACCESS_KEY instead") { |o| @secret_key = o }
  opts.on("-b", "--bucket BUCKET_NAME", "The S3 bucket you want the files to go into. Repeat for multiple buckets.") { |o| @buckets << o }

  opts.separator ""
  opts.separator "Optional:"

  opts.on("-x", "--remote-prefix PREFIX", "A prefix to add to each file as it's uploaded") { |o| @prefix = o }
  opts.on("-v", "--verbose", "Print the file names as they are being copied. Repeat for more details") { |o| @verbose += 1 }
  opts.on("-p", "--public-read", "Set the copied files permission to be public readable.") { |o| @public = true }
  opts.on("-c", "--compress EXT", "Compress files with given EXT before uploading (usually css and js),", "setting the HTTP headers for delivery accordingly. Repeat for multiple extensions") { |o| @compress << ".#{o}" }
  opts.on("-d", "--digest", "Save the sha1 digest of the file, to the S3 metadata. Requires sha1sum to be installed") { |o| @save_hash = true }
  opts.on("-t", "--time", "Save modified time of the file, to the S3 metadata") { |o| @save_time = true }
  opts.on("-z", "--size", "Save size of the file, to the S3 metadata") { |o| @save_size = true }
  opts.on("-r", "--recursive", "If using file system based FILE_SPEC, recurse into sub-directories") { |o| @fs_recurse = true }
  opts.on("-g", "--glob-ruby", "Interpret FILE_SPEC as a Ruby Dir::Glob. Make sure to put it in ''") { |o| @ruby_glob = true }
  opts.on("-m", "--modified-only", "Only upload files that changed since the last digest-enabled upload.", "Will force digest, size and time modes on") { |o| @modified_only = @save_hash = @save_time = @save_size = true; }
  opts.on("-y", "--dry-run", "Simulate only - do not upload any file to S3") { |o| @dry_run = true }
  opts.on("-h", "--help", "Show these instructions") { |o| @help_exit = true }
  opts.separator ""
  # BUG FIX: this was a second assignment to opts.banner, which replaced the
  # "Usage: ..." line in the help output; a trailing separator keeps both.
  opts.separator "Copyright(c) Famundo LLC, 2007 (www.famundo.com). Released under the MIT license."
end

# Everything left after option parsing is the list of files/globs to upload.
@file_spec = opts.parse!(ARGV)

# Fall back to the standard AWS environment variables for credentials.
@access_key ||= ENV['AWS_ACCESS_KEY_ID']
@secret_key ||= ENV['AWS_SECRET_ACCESS_KEY']
@prefix ||= ''

# Missing required arguments (or -h): print usage and bail out.
if @help_exit || !@access_key || !@secret_key || @buckets.empty? || !@file_spec || @file_spec.empty?
  puts opts.to_s
  exit
end

# Now we start working for real
require 'rubygems'
require 'aws/s3'
include AWS::S3
require 'fileutils'
require 'stringio'
require 'zlib'

# Emit +message+ to stdout when the current verbosity level (@verbose, set
# by repeated -v flags) is at least +for_level+; otherwise stay silent.
def log message, for_level
  return if @verbose < for_level
  puts message
end


# Open the S3 connection up front; every upload below reuses this
# process-global connection (aws-s3 gem API).
log "Connecting to S3", 3
AWS::S3::Base.establish_connection!(:access_key_id => @access_key, :secret_access_key => @secret_key)
log "Connected!", 3

# Upload a single file to every bucket in @buckets, optionally gzip-compressing
# it and attaching sha1/mtime/size metadata. +fstat+ is the File::Stat for
# +file+. With --modified-only, a bucket is skipped when its stored sha1
# matches the local one. Honors @dry_run (build everything, store nothing).
def copy_one_file file, fstat
  compressed = nil
  content_encoding = nil # NOTE(review): unused — Content-Encoding is set via `options` below
  log_prefix = ''

  # Metadata / ACL options sent with the S3 PUT.
  options = {}
  options[:access] = :public_read if @public
  # NOTE(review): `file` is interpolated into a shell command unquoted — file
  # names containing spaces or shell metacharacters will break this sha1sum call.
  options["x-amz-meta-sha1_hash"] = `sha1sum #{file}`.split[0] if @save_hash
  options["x-amz-meta-mtime"] = fstat.mtime.getutc.to_i if @save_time
  options["x-amz-meta-size"] = fstat.size if @save_size

  sent_it = !@modified_only
  @buckets.each do |b|
    # Check if it was modified since the last digest-enabled upload.
    if @modified_only
      begin
        if S3Object.find("#{@prefix}#{file}", b).metadata["x-amz-meta-sha1_hash"] == options["x-amz-meta-sha1_hash"]
          # No change - go on
          log("Skipping: #{file} in #{b}", 3)
          next
        end
      rescue AWS::S3::NoSuchKey => ex
        # This file isn't there yet, so we need to send it
      end
    end

    # Compress only if -c matched this extension and we haven't compressed yet
    # (the gzipped bytes are computed once and reused for every bucket).
    if !@compress.empty? && compressed.nil?
      if @compress.include?(File.extname(file))
        # Gzip the file contents in memory.
        log "Compressing #{file}", 3
        strio = StringIO.open('', 'w')
        gz = Zlib::GzipWriter.new(strio)
        gz.write(open(file).read)
        gz.close
        compressed = strio.string
        options["Content-Encoding"] = 'gzip'
        log_prefix = '[c] ' if @verbose == 2 # Mark as compressed
      elsif @verbose == 2
        log_prefix = '[-] ' # So the file names align...
      end
    end

    log("Sending #{file} to #{b}...", 3)
    S3Object.store("#{@prefix}#{file}", compressed.nil? ? open(file) : compressed, b, options) unless @dry_run
    sent_it = true
  end
  # Print the file name once (at -v) if at least one bucket received it.
  log("#{log_prefix}#{file}", 1) if sent_it
end

# Upload one file-system entry (used for plain, non -g FILE_SPEC arguments).
# Hidden entries are ignored. Regular files are uploaded via copy_one_file;
# with --recursive, directories are walked depth-first.
def copy_one_file_or_dir name, base_dir
  # Skip dotfiles — this also covers Dir.foreach's "." and ".." entries.
  return if name.start_with?('.')

  path = "#{base_dir}#{name}"
  stat = File.stat(path)
  # NOTE(review): File.stat follows symlinks, so stat.symlink? here is
  # effectively never true; File.lstat would be needed to detect links.
  copy_one_file(path, stat) if stat.file? || stat.symlink?

  # Descend into sub-directories only when -r was given.
  return unless @fs_recurse && stat.directory?
  child_base = path + '/'
  Dir.foreach(child_base) { |entry| copy_one_file_or_dir(entry, child_base) }
end


# Main entry: expand each FILE_SPEC argument and upload whatever it matches.
# With -g the argument is treated as a Ruby Dir.glob pattern; otherwise it is
# a literal file or directory path (recursed into when -r was given).
@file_spec.each do |pattern|
  unless @ruby_glob
    # File-system style: hand the literal path to the recursive walker.
    copy_one_file_or_dir(pattern, '')
    next
  end

  # Ruby-glob style: upload every plain file the pattern expands to.
  Dir.glob(pattern) do |match|
    info = File.stat(match)
    copy_one_file(match, info) if info.file? || info.symlink?
  end
end

0 comments on commit 93e9896

Please sign in to comment.