From 93e9896b04f79eb09be28a9056671b1d23f3143d Mon Sep 17 00:00:00 2001 From: Orta Therox Date: Sat, 2 Mar 2013 17:40:44 +0000 Subject: [PATCH] initial commit --- .gitignore | 3 + Gemfile | 7 ++ Gemfile.lock | 48 +++++++++++ Guardfile | 4 + app.rb | 63 ++++++++++++++ example/ABGetMe.podspec | 1 + example/AFNetworking.podspec | 30 +++++++ s3_upload.rb | 154 +++++++++++++++++++++++++++++++++++ 8 files changed, 310 insertions(+) create mode 100644 .gitignore create mode 100644 Gemfile create mode 100644 Gemfile.lock create mode 100644 Guardfile create mode 100644 app.rb create mode 100644 example/ABGetMe.podspec create mode 100644 example/AFNetworking.podspec create mode 100644 s3_upload.rb diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..654b0e3 --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +docset +download +download_cache \ No newline at end of file diff --git a/Gemfile b/Gemfile new file mode 100644 index 0000000..1436d00 --- /dev/null +++ b/Gemfile @@ -0,0 +1,7 @@ +source 'https://rubygems.org' + +gem "guard-ruby" +gem 'rb-fsevent' + +gem "cocoapods-downloader" +gem "aws-sdk" diff --git a/Gemfile.lock b/Gemfile.lock new file mode 100644 index 0000000..e0d7e5c --- /dev/null +++ b/Gemfile.lock @@ -0,0 +1,48 @@ +GEM + remote: https://rubygems.org/ + specs: + aws-sdk (1.6.9) + httparty (~> 0.7) + json (~> 1.4) + nokogiri (>= 1.4.4) + uuidtools (~> 2.1) + cocoapods-downloader (0.1.0) + coderay (1.0.9) + guard (1.6.2) + listen (>= 0.6.0) + lumberjack (>= 1.0.2) + pry (>= 0.9.10) + terminal-table (>= 1.4.3) + thor (>= 0.14.6) + guard-ruby (0.0.1) + guard + httparty (0.10.2) + multi_json (~> 1.0) + multi_xml (>= 0.5.2) + json (1.7.7) + listen (0.7.3) + lumberjack (1.0.2) + method_source (0.8.1) + multi_json (1.6.1) + multi_xml (0.5.3) + nokogiri (1.5.6) + pry (0.9.12) + coderay (~> 1.0.5) + method_source (~> 0.8) + slop (~> 3.4) + rb-fsevent (0.9.3) + s3-deploy (0.0.1) + aws-sdk (= 1.6.9) + slop (3.4.3) + terminal-table (1.4.5) + thor (0.17.0) + 
require 'cocoapods-downloader'
require 'ostruct'
require 'yaml'
require 'aws/s3'

puts ""

current_dir = File.dirname(File.expand_path(__FILE__))
podfile_file_path = current_dir + "/example/AFNetworking.podspec"

# Minimal stand-in for CocoaPods' Pod::Spec DSL: every attribute the podspec
# block assigns (name, version, source, …) is recorded on an OpenStruct, with
# nested OpenStructs for the platform-scoped `ios` / `osx` attributes.
module Pod
  class Spec < OpenStruct
    def initialize(&block)
      super
      self.ios = OpenStruct.new
      self.osx = OpenStruct.new

      block.call(self)
    end
  end
end

# Run appledoc over the downloaded source at `location`, writing HTML into the
# parallel "docset" directory (download/… → docset/…).
# Returns the `system` result: true when appledoc exits 0, false on non-zero,
# nil when appledoc is not installed.
def create_docset_for_spec spec, location
  docset_command = []
  docset_command << %Q[appledoc --create-html --keep-intermediate-files]
  docset_command << "--project-name #{spec.name}"
  docset_command << "--project-company test"
  docset_command << "--no-install-docset"
  docset_command << "--company-id com.#{spec.name.downcase}.#{spec.version.downcase}"
  docset_command << "--output #{ location.clone.sub "download", "docset" }"
  docset_command << location
  system docset_command.join(' ')
end

# Push the generated "docset" folder to S3 via the bundled uploader script.
# Credentials come from the S3_KEY / S3_SECRET environment variables.
# NOTE(review): `location` is currently unused — the command always uploads the
# literal "docset" directory; confirm whether per-spec paths were intended.
def upload_docset_to_s3 location
  upload_command = []
  upload_command << "ruby s3_upload.rb"
  upload_command << "--key " + ENV["S3_KEY"]
  upload_command << "--secret " + ENV["S3_SECRET"]
  upload_command << "--bucket cocoadocs.org"
  upload_command << "--public-read --recursive"
  upload_command << "docset"
  # BUG FIX: was `system docset_command.join(' ')` — `docset_command` is not
  # defined in this method, so every call raised NameError and nothing was
  # ever uploaded.
  system upload_command.join(' ')
end

# Evaluate the podspec DSL against our Pod::Spec shim above.
# NOTE(review): eval of the podspec file is the standard CocoaPods approach,
# but it executes arbitrary Ruby — only run against trusted specs.
spec = eval(File.open(podfile_file_path).read)
download_location = current_dir + "/download/#{spec.name}/#{spec.version}/"
docset_location = current_dir + "/docset/#{spec.name}/#{spec.version}/"

# Skip the download when this spec/version has already been fetched.
unless File.exist? download_location # File.exists? is deprecated (removed in Ruby 3.2)
  downloader = Pod::Downloader.for_target(download_location, spec.source)
  # BUG FIX: was `current_dir + "download_cache"` (missing "/"), which placed
  # the cache at a sibling "…download_cache" path instead of ./download_cache
  # (the directory name the repo's .gitignore expects).
  downloader.cache_root = current_dir + "/download_cache"
  downloader.download
end

created_docset = create_docset_for_spec spec, download_location
if created_docset
  upload_docset_to_s3 docset_location
end

puts "done"
# http://www.famundo.com
# Author: Gal Naor - http://devblog.famundo.com

require 'optparse'
require 'shellwords'

# Parse the options. State is kept in instance variables on `main` so the
# top-level helper methods below can read it.
@buckets  = []
@compress = []
@verbose  = 0
opts = OptionParser.new do |parser|
  parser.banner = "Usage: cp2s3.rb [options] FILE_SPEC"
  parser.separator "Copy files and directories from the local machine into Amazon's S3. Keep the directory structure intact."
  parser.separator "Empty directories will be skipped."
  parser.separator ""
  parser.separator "FILE_SPEC List of files/directories. Accepts wildcards."
  parser.separator "          If given the -g option, interpret FILE_SPEC as a Ruby Dir::Glob style regular expressions."
  parser.separator "          With -g option, '' needed around the pattern to protect it from shell parsing."
  parser.separator ""
  parser.separator "Required:"
  parser.on("-k", "--key ACCESS_KEY", "Your S3 access key. You can also set the environment variable AWS_ACCESS_KEY_ID instead") { |o| @access_key = o }
  parser.on("-s", "--secret SECRET_KEY", "Your S3 secret key. You can also set the environment variable AWS_SECRET_ACCESS_KEY instead") { |o| @secret_key = o }
  parser.on("-b", "--bucket BUCKET_NAME", "The S3 bucket you want the files to go into. Repeat for multiple buckets.") { |o| @buckets << o }

  parser.separator ""
  parser.separator "Optional:"

  parser.on("-x", "--remote-prefix PREFIX", "A prefix to add to each file as it's uploaded") { |o| @prefix = o }
  parser.on("-v", "--verbose", "Print the file names as they are being copied. Repeat for more details") { |o| @verbose += 1 }
  parser.on("-p", "--public-read", "Set the copied files permission to be public readable.") { |o| @public = true }
  parser.on("-c", "--compress EXT", "Compress files with given EXT before uploading (usually css and js),", "setting the HTTP headers for delivery accordingly. Repeat for multiple extensions") { |o| @compress << ".#{o}" }
  parser.on("-d", "--digest", "Save the sha1 digest of the file, to the S3 metadata. Requires sha1sum to be installed") { |o| @save_hash = true }
  parser.on("-t", "--time", "Save modified time of the file, to the S3 metadata") { |o| @save_time = true }
  parser.on("-z", "--size", "Save size of the file, to the S3 metadata") { |o| @save_size = true }
  parser.on("-r", "--recursive", "If using file system based FILE_SPEC, recurse into sub-directories") { |o| @fs_recurse = true }
  parser.on("-g", "--glob-ruby", "Interpret FILE_SPEC as a Ruby Dir::Glob. Make sure to put it in ''") { |o| @ruby_glob = true }
  parser.on("-m", "--modified-only", "Only upload files that were modified (files must have been uploaded with the digest option).", "Will force digest, size and time modes on") { |o| @modified_only = @save_hash = @save_time = @save_size = true }
  parser.on("-y", "--dry-run", "Simulate only - do not upload any file to S3") { |o| @dry_run = true }
  parser.on("-h", "--help", "Show these instructions") { |o| @help_exit = true }
  parser.separator ""
  # BUG FIX: was `opts.banner = "Copyright(c)…"`, which OVERWROTE the Usage
  # banner set above — `-h` printed the copyright line instead of the usage.
  parser.separator "Copyright(c) Famundo LLC, 2007 (www.famundo.com). Released under the MIT license."
end

@file_spec = opts.parse!(ARGV)

# Fall back to the conventional AWS environment variables.
@access_key ||= ENV['AWS_ACCESS_KEY_ID']
@secret_key ||= ENV['AWS_SECRET_ACCESS_KEY']
@prefix     ||= ''

# Bail out with usage when help was requested or required arguments are missing.
if @help_exit || !@access_key || !@secret_key || @buckets.empty? || !@file_spec || @file_spec.empty?
  puts opts.to_s
  exit
end

# Now we start working for real
require 'rubygems'
require 'aws/s3'
include AWS::S3
require 'fileutils'
require 'stringio'
require 'zlib'

# Log to stdout according to verbosity: print `message` only when the
# requested -v count is at least `for_level`.
def log message, for_level
  puts(message) if @verbose >= for_level
end


# Connect to S3
log "Connecting to S3", 3
AWS::S3::Base.establish_connection!(:access_key_id => @access_key, :secret_access_key => @secret_key)
log "Connected!", 3

# Copy one file to every requested bucket, compressing and attaching metadata
# as the options dictate. `fstat` is the File::Stat for `file`.
def copy_one_file file, fstat
  compressed = nil
  log_prefix = ''

  # Build the S3 request options: ACL plus optional sha1/mtime/size metadata.
  options = {}
  options[:access] = :public_read if @public
  # BUG FIX: the filename is now shell-escaped — the previous bare
  # interpolation broke (or allowed injection) on names containing spaces or
  # shell metacharacters.
  options["x-amz-meta-sha1_hash"] = `sha1sum #{file.shellescape}`.split[0] if @save_hash
  options["x-amz-meta-mtime"] = fstat.mtime.getutc.to_i if @save_time
  options["x-amz-meta-size"] = fstat.size if @save_size

  sent_it = !@modified_only
  @buckets.each do |bucket|
    # With --modified-only, skip the upload when the stored sha1 matches.
    if @modified_only
      begin
        if S3Object.find("#{@prefix}#{file}", bucket).metadata["x-amz-meta-sha1_hash"] == options["x-amz-meta-sha1_hash"]
          # No change - go on
          log("Skipping: #{file} in #{bucket}", 3)
          next
        end
      rescue AWS::S3::NoSuchKey
        # This file isn't there yet, so we need to send it
      end
    end

    # Compress at most once (the first bucket pass), and only for the
    # extensions requested via --compress.
    if !@compress.empty? && compressed.nil?
      if @compress.include?(File.extname(file))
        log "Compressing #{file}", 3
        strio = StringIO.open('', 'w')
        gz = Zlib::GzipWriter.new(strio)
        gz.write(open(file).read)
        gz.close
        compressed = strio.string
        options["Content-Encoding"] = 'gzip'
        log_prefix = '[c] ' if @verbose == 2 # Mark as compressed
      elsif @verbose == 2
        log_prefix = '[-] ' # So the file names align...
      end
    end

    log("Sending #{file} to #{bucket}...", 3)
    S3Object.store("#{@prefix}#{file}", compressed.nil? ? open(file) : compressed, bucket, options) unless @dry_run
    sent_it = true
  end
  log("#{log_prefix}#{file}", 1) if sent_it
end

# Copy one file/dir from the file system, recursing when -r was given.
# Used for non-Ruby style (shell-expanded) FILE_SPECs. Hidden entries
# (names starting with '.') are skipped, which also avoids '.' and '..'.
def copy_one_file_or_dir name, base_dir
  return if name[0, 1] == '.'
  file_name = "#{base_dir}#{name}"
  fstat = File.stat(file_name)
  copy_one_file(file_name, fstat) if fstat.file? || fstat.symlink?
  # See if we need to recurse...
  if @fs_recurse && fstat.directory?
    my_base = file_name + '/'
    Dir.foreach(my_base) { |entry| copy_one_file_or_dir(entry, my_base) }
  end
end


# Walk every FILE_SPEC: either expand it as a Ruby Dir::Glob (-g) or treat it
# as a shell-expanded file-system path list.
@file_spec.each do |spec|
  if @ruby_glob
    # Ruby style
    Dir.glob(spec) do |file|
      fstat = File.stat(file)
      copy_one_file(file, fstat) if fstat.file? || fstat.symlink?
    end
  else
    # File system style
    copy_one_file_or_dir(spec, '')
  end
end