Permalink
Switch branches/tags
production_RELEASE_RELEASE_CANDIDATE_STABLE_34 production_RELEASE_RELEASE_CANDIDATE_STABLE_32 production_RELEASE_RELEASE_CANDIDATE_STABLE_31 production_RELEASE_RELEASE_CANDIDATE_STABLE_30 production_RELEASE_RELEASE_CANDIDATE_STABLE_28 production_RELEASE_RELEASE_CANDIDATE_STABLE_27 production_RELEASE_RELEASE_CANDIDATE_STABLE_26 production_RELEASE_RELEASE_CANDIDATE_STABLE_25 production_RELEASE_RELEASE_CANDIDATE_STABLE_24 production_RELEASE_RELEASE_CANDIDATE_STABLE_23 production_RELEASE_RELEASE_CANDIDATE_STABLE_22 production_RELEASE_RELEASE_CANDIDATE_STABLE_21 production_RELEASE_RELEASE_CANDIDATE_STABLE_20 production_RELEASE_RELEASE_CANDIDATE_STABLE_19 production_RELEASE_RELEASE_CANDIDATE_STABLE_18 production_RELEASE_RELEASE_CANDIDATE_STABLE_17 production_RELEASE_RELEASE_CANDIDATE_STABLE_16 production_RELEASE_RELEASE_CANDIDATE_STABLE_15 production_RELEASE_RELEASE_CANDIDATE_STABLE_14 production_RELEASE_RELEASE_CANDIDATE_STABLE_13 production_RELEASE_RELEASE_CANDIDATE_STABLE_12 production_RELEASE_RELEASE_CANDIDATE_STABLE_11 production_RELEASE_RELEASE_CANDIDATE_STABLE_10 production_RELEASE_RELEASE_CANDIDATE_STABLE_8 production_RELEASE_RELEASE_CANDIDATE_STABLE_6 production_RELEASE_RELEASE_CANDIDATE_STABLE_5 production_RELEASE_RELEASE_CANDIDATE_STABLE_2 production_RELEASE_RELEASE_CANDIDATE_BETA_1 production_RELEASE_DEV_597 production_RELEASE_DEV_596 production_RELEASE_DEV_595 production_RELEASE_DEV_594 production_RELEASE_DEV_593 production_RELEASE_DEV_592 production_RELEASE_DEV_591 production_RELEASE_DEV_590 production_RELEASE_DEV_589 production_RELEASE_DEV_588 production_RELEASE_DEV_587 production_RELEASE_DEV_586 production_RELEASE_DEV_585 production_RELEASE_DEV_584 production_RELEASE_DEV_583 production_RELEASE_DEV_582 production_RELEASE_DEV_581 production_RELEASE_DEV_580 production_RELEASE_DEV_579 production_RELEASE_DEV_578 production_RELEASE_DEV_577 production_RELEASE_DEV_576 production_RELEASE_DEV_575 production_RELEASE_DEV_574 production_RELEASE_DEV_573 
production_RELEASE_DEV_572 production_RELEASE_DEV_571 production_RELEASE_DEV_570 production_RELEASE_DEV_569 production_RELEASE_DEV_568 production_RELEASE_DEV_567 production_RELEASE_DEV_566 production_RELEASE_DEV_565 production_RELEASE_DEV_564 production_RELEASE_DEV_563 production_RELEASE_DEV_562 production_RELEASE_DEV_561 production_RELEASE_DEV_560 production_RELEASE_DEV_559 production_RELEASE_DEV_558 production_RELEASE_DEV_557 production_RELEASE_DEV_556 production_RELEASE_DEV_555 production_RELEASE_DEV_554 production_RELEASE_DEV_553 production_RELEASE_DEV_552 production_RELEASE_DEV_551 production_RELEASE_DEV_550 production_RELEASE_DEV_549 production_RELEASE_DEV_548 production_RELEASE_DEV_547 production_RELEASE_DEV_546 production_RELEASE_DEV_545 production_RELEASE_DEV_544 production_RELEASE_DEV_543 production_RELEASE_DEV_542 production_RELEASE_DEV_541 production_RELEASE_DEV_540 production_RELEASE_DEV_539 production_RELEASE_DEV_538 production_RELEASE_DEV_537 production_RELEASE_DEV_536 production_RELEASE_DEV_535 production_RELEASE_DEV_534 production_RELEASE_DEV_533 production_RELEASE_DEV_532 production_RELEASE_DEV_531 production_RELEASE_DEV_530 production_RELEASE_DEV_529 production_RELEASE_DEV_528 production_RELEASE_DEV_527 production_RELEASE_DEV_526
Nothing to show
Find file
Fetching contributors…
Cannot retrieve contributors at this time
149 lines (143 sloc) 4.56 KB
dep 'mirror has assets' do
# Filesystem root the mirrored sites live under; each domain gets its own subdirectory.
define_var :mirror_prefix, :default => '/srv/http' #L{ "http://#{var(:mirror_path).p.basename}" }
define_var :local_path, :default => :mirror_domain
# All url(...) targets referenced from the mirrored .html/.htm/.css files, deduplicated.
def scanned_urls
(var(:mirror_prefix) / var(:local_path)).glob("**/*").select {|f|
f[/\.(html?|css)$/i]
}.map {|f|
# Pulls the target out of CSS-style url('...'), url("..."), and bare url(...) refs.
f.p.read.scan(/url\(['"]?([^)'"]+)['"]?\)/).flatten
}.flatten.uniq
end
# Groups scanned urls by domain: {domain => [asset paths relative to that domain]}.
# Absolute http:// and protocol-relative // urls are keyed by their own host;
# everything else is assumed to belong to :mirror_domain.
# NOTE(review): /^[http\:]*\/\// is a character class (any of h,t,p,:), not the
# literal "http:" — it happens to accept "http://" and "//", but "https://" never
# matches the first test (no 's' in "(http\:)?"), so https urls silently fall
# through to :mirror_domain. Confirm that's intended for this mirror.
def asset_map
scanned_urls.group_by {|url|
url[/^(http\:)?\/\//] ? url.scan(/^[http\:]*\/\/([^\/]+)/).flatten.first : var(:mirror_domain)
}.map_values {|domain,urls|
# Strip the scheme+host prefix so each entry is a path relative to its domain.
urls.map {|url| url.sub(/^(http\:)?\/\/[^\/]+\//, '') }
}
end
# Like asset_map, but keeping only assets whose local copy is missing or zero-length.
def nonexistent_asset_map
asset_map.map_values {|domain,assets|
assets.reject {|asset|
path = var(:mirror_prefix) / domain / asset
path.exists? && !path.empty?
}
}
end
met? { nonexistent_asset_map.values.all? &:empty? }
meet {
nonexistent_asset_map.each_pair {|domain,assets|
assets.each {|asset|
# NOTE(review): domain/asset come from scraped page content and are single-quoted
# straight into shell commands — a quote character in an asset path would break
# (or inject into) these. Consider shellwords-escaping the interpolated values.
shell "mkdir -p '#{var(:mirror_prefix) / domain / File.dirname(asset)}'"
log_shell "Downloading http://#{domain}/#{asset}", "wget -O '#{var(:mirror_prefix) / domain / asset}' '#{File.join "http://#{domain}", asset}'"
}
}
}
end
meta :twitter do
  # Twitter handles listed in campers.txt (one per line), deduplicated,
  # with any leading '@' stripped.
  def users
    handles = "~/Desktop/rc7/campers.txt".p.read.split(/\n+/)
    handles.uniq.map {|handle| handle.sub(/^@/, '') }
  end
  # Paths of the avatar files already on disk — the first glob match
  # (any extension) per user, skipping users with no match.
  def avatars
    users.map {|user|
      found = "~/Desktop/rc7/avatars/".p.glob("#{user}.*").first
      found.p unless found.nil?
    }.compact
  end
  # Avatar files that are absent or zero-length, and hence need (re)fetching.
  def missing_avatars
    avatars.reject {|path| path.exists? && !path.empty? }
  end
end
dep 'avatars mirrored.twitter' do
# NOTE(review): hardcoded password default committed to source — should be
# prompted for (no :default) or pulled from the environment instead.
define_var :twitter_pass, :default => L{ 'secret' }
met? { missing_avatars.empty? }
meet {
require 'rubygems'
require 'twitter'
client = Twitter::Base.new(Twitter::HTTPAuth.new(var(:twitter_username), var(:twitter_pass)))
cd "~/Desktop/rc7/avatars", :create => true do
# NOTE(review): missing_avatars (from meta :twitter) yields avatar file *paths*,
# not screen names — passing one to client.user looks wrong. Verify this branch
# ever succeeds; it may only be reachable for zero-length leftover files.
missing_avatars.each {|name|
begin
# Strip the '_normal' suffix to fetch the full-size image instead of the 48px thumb.
url = client.user(name)['profile_image_url'].sub(/_normal(\.[a-zA-Z]+)$/) { $1 }
Babushka::Archive.download url, name
rescue Twitter::NotFound
log_error "#{name}: 404."
rescue Twitter::InformTwitter
log_error "#{name}: Fail whale!"
rescue JSON::ParserError
log_error "#{name}: Bad JSON."
end
}
end
}
end
dep 'avatars renamed.twitter' do
  # requires 'twitter avatars mirrored'
  # Met once every downloaded avatar ends in an image-ish extension
  # (the loose char class covers jpg/jpeg/png/gif).
  met? {
    (avatars - missing_avatars).all? {|avatar| avatar.to_s[/\.[jpengif]{3,4}$/] }
  }
  meet {
    # Sniff each file's real type with file(1) and append the matching
    # lowercase extension; files that file(1) can't identify are left alone.
    (avatars - missing_avatars).each {|avatar|
      detected = shell("file '#{avatar}'").scan(/([A-Z]+) image/).flatten.first
      if !detected.nil?
        shell "mv '#{avatar}' '#{avatar}.#{detected.downcase}'"
      end
    }
  }
  after {
    # Anything still lacking an image extension couldn't be identified.
    log "These ones are broken:"
    log avatars.reject {|avatar| avatar.to_s[/\.[jpengif]{3,4}$/] }.join("\n")
  }
end
dep 'gravatars mirrored' do
  # Unique email addresses read from emails.txt, one per line.
  def users
    emails = "~/Desktop/rc7/emails.txt".p.read.split(/\n+/)
    emails.uniq
  end
  # Emails whose gravatar hasn't been saved locally yet.
  def missing_avatars
    users.reject {|user| "~/Desktop/rc7/gravatars/#{user}.jpg".p.exists? }
  end
  met? {
    missing_avatars.empty?
  }
  meet {
    require 'digest/md5'
    cd "~/Desktop/rc7/gravatars", :create => true do
      missing_avatars.each {|email|
        # Gravatar addresses images by the MD5 of the email address;
        # s=512 requests a 512px image, d=404 makes absent gravatars 404
        # instead of returning a placeholder.
        url = "http://gravatar.com/avatar/#{Digest::MD5.hexdigest(email)}.jpg?s=512&d=404"
        Babushka::Archive.download url, "#{email}.jpg"
      }
    end
  }
end
dep 'google ajax libs mirrored' do
  define_var :mirror_root, :default => '/srv/http/ajax.googleapis.com'
  # Extracts the value labelled "<key>:" from a library's <dd class="al-libstate"> entry.
  def search_libstate doc, key
    doc.search("dd.al-libstate[text()*='#{key}:']").text.gsub("#{key}:", '').strip
  end
  # Scrapes the AJAX Libraries API docs page and expands each library's path
  # template (plus its uncompressed "path(u)" variant, when present) across
  # every listed version, returning a flat list of URIs for all hosted files.
  def urls
    require 'rubygems'
    require 'hpricot'
    require 'net/http'
    Hpricot(Net::HTTP.get(URI.parse('http://code.google.com/apis/ajaxlibs/documentation/'))).search('.al-liblist').map {|lib|
      versions = search_libstate(lib, 'versions').split(/[, ]+/)
      [search_libstate(lib, 'path'), search_libstate(lib, 'path(u)')].squash.map {|path_template|
        versions.map {|version|
          # The documented path names the newest version; substitute each version in turn.
          URI.parse path_template.gsub(versions.last, version)
        }
      }
    }.flatten
  end
  # Urls whose corresponding file doesn't yet exist under :mirror_root.
  def missing_urls
    urls.tap {|all|
      log "#{all.length} items to consider."
    }.reject {|url| (var(:mirror_root) / url.path).exists? }.tap {|missing|
      # Fix: this list holds the urls we *don't* have yet — the old message
      # ("Of those, we have N.") reported the missing count as the present count.
      log "Of those, #{missing.length} are missing."
    }
  end
  met? { missing_urls.empty? }
  meet {
    # Mirror each file at the same path it occupies on ajax.googleapis.com.
    missing_urls.each {|url|
      cd var(:mirror_root) / url.path.p.dirname, :create => true do
        Babushka::Archive.download url
      end
    }
  }
end