
gemify

commit ab1e165ab8cd9e42f5ed630d5901628ae2b2dc02 (1 parent: 566fb5a), committed by @grosser on Mar 16, 2011
Gemfile
@@ -6,4 +6,5 @@ gem 'fastercsv'
group :dev do
gem 'rake'
gem 'rspec', '~>2'
+ gem 'jeweler'
end
Gemfile.lock
@@ -3,6 +3,11 @@ GEM
specs:
diff-lcs (1.1.2)
fastercsv (1.5.4)
+ git (1.2.5)
+ jeweler (1.5.2)
+ bundler (~> 1.0.0)
+ git (>= 1.2.5)
+ rake
mechanize (1.0.0)
nokogiri (>= 1.2.1)
nokogiri (1.4.4)
@@ -21,6 +26,7 @@ PLATFORMS
DEPENDENCIES
fastercsv
+ jeweler
mechanize
rake
rspec (~> 2)
Rakefile
@@ -5,6 +5,21 @@ end
task :headers do
require File.expand_path('lib/webpagetest_grabber')
csv = `curl http://www.webpagetest.org/result/110316_DA_5TFD/110316_DA_5TFD_google.com_page_data.csv`
- data = WebpagetestGrabber.csv_to_array(csv).first.keys
+ data = PagespeedGrabber.csv_to_array(csv).first.keys
puts data.inspect
end
+
+begin
+ require 'jeweler'
+ Jeweler::Tasks.new do |gem|
+ gem.name = 'pagespeed_grabber'
+ gem.summary = "Grab pagespeed results from external services to use them in e.g. munin."
+ gem.email = "michael@grosser.it"
+ gem.homepage = "http://github.com/grosser/#{gem.name}"
+ gem.authors = ["Michael Grosser"]
+ end
+
+ Jeweler::GemcutterTasks.new
+rescue LoadError
+ puts "Jeweler, or one of its dependencies, is not available. Install it with: gem install jeweler"
+end
Readme.md
@@ -1,4 +1,21 @@
-Grab raw csv results from webpagetest.org
+Grab pagespeed results from external services to use them in e.g. munin.<br/>
+Currently only supports webpagetest.org, which is not very reliable and sometimes slow.
- # if the queue is full this will take forever, so be careful !!
- data = WebpagetestGrabber.fetch('google.com', :timeout => 200)
+Not ready for production, just a toy project.
+
+ data = PagespeedGrabber.fetch('google.com', :from => 'webpagetest', :timeout => 200)
+
+### Munin
+`munin.rb` can be moved into munin/plugins and renamed to e.g. webpagetest_google.com to aggregate results.
+
+TODO
+====
+ - add other services
+ - make less hacky/use official apis
+ - generate initial AND repeated graph
+
+Authors
+=======
+[Michael Grosser](http://grosser.it)<br/>
+michael@grosser.it<br/>
+Hereby placed under public domain, do what you want, just do not hold me accountable...
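
The Readme only shows the one-line call, so here is a slightly fuller usage sketch. It assumes the two-element return value (first and repeat run) that munin.rb and the spec rely on; the `:from => 'webpagetest'` option is copied from the Readme example, while the library code in this commit only reads `:timeout`.

    require 'rubygems'
    require 'pagespeed_grabber'

    # fetch returns one hash per run, keyed by the strings in PagespeedGrabber::HEADERS
    initial, repeated = PagespeedGrabber.fetch('google.com', :from => 'webpagetest', :timeout => 200)
    puts initial['Load Time (ms)']
    puts repeated['Load Time (ms)']
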
VERSION (new file)
@@ -0,0 +1 @@
+0.0.1
lib/pagespeed_grabber.rb (renamed from lib/webpagetest_grabber.rb)
@@ -3,18 +3,17 @@
require 'faster_csv'
require 'timeout'
-module WebpagetestGrabber
+module PagespeedGrabber
URL = 'http://www.webpagetest.org'
-
- # page takes ~ 20s to generate
+ VERSION = File.read( File.join(File.dirname(__FILE__),'..','VERSION') ).strip
DEFAULT_TIMEOUT = 100
TIME_BETWEEN_TRIES = 10
CSV_LINK = 'Raw page data'
HEADERS = ["Load Time (ms)", "Other Responses (Doc)", "Connections", "Minify Savings", "Experimental", "IP Address", "ETag Score", "GZIP Score", "DNS Lookups", "Event Name", "Not Found", "Segments Transmitted", "Keep-Alive Score", "Time", "Not Modified", "Cookie Score", "Measurement Type", "Gzip Savings", "Time to Base Page Complete (ms)", "One CDN Score", "OK Responses", "Error Code", "unused", "Requests", "Other Responses", "Includes Object Data", "Minify Total Bytes", "Flagged Requests", "AFT (ms)", "Packet Loss (out)", "Compression Score", "URL", "OK Responses (Doc)", "Combine Score", "Base Page Result", "Doc Complete Time (ms)", "Redirects (Doc)", "Pagetest Build", "Minify Score", "Time to Start Render (ms)", "Cache Score", "Base Page Redirects", "Bytes Out (Doc)", "Descriptor", "Dialer ID", "Connection Type", "Activity Time(ms)", "Time to First Byte (ms)", "Date", "Not Found (Doc)", "Redirects", "Not Modified (Doc)", "Event GUID", "Event URL", "Requests (Doc)", "Bytes In (Doc)", "Time to DOM Element (ms)", "Static CDN Score", "Optimization Checked", "Image Savings", "Connections (Doc)", "Flagged Connections", "Max Simultaneous Flagged Connections", "Cached", "Gzip Total Bytes", "Bytes Out", "Bytes In", "DNS Lookups (Doc)", "Segments Retransmitted", "DOCTYPE Score", "Image Total Bytes", "Host", "Lab ID"]
def self.fetch(test_url, options={})
Timeout.timeout(options[:timeout]||DEFAULT_TIMEOUT) do
- csv_to_array(download_csv(test_url, options))
+ csv_to_array(download_csv(test_url))
end
end
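
Since `fetch` wraps the download in `Timeout.timeout`, a full webpagetest queue surfaces as a `Timeout::Error` instead of a hang. A minimal sketch of guarding against that (the 30-second value is only an example):

    require 'rubygems'
    require 'pagespeed_grabber'

    begin
      initial, repeated = PagespeedGrabber.fetch('google.com', :timeout => 30)
    rescue Timeout::Error
      warn "webpagetest.org did not return results within 30s"
    end
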
munin.rb
@@ -1,6 +1,6 @@
#!/usr/bin/env ruby
require 'rubygems'
-require 'webpagetest_grabber'
+require 'pagespeed_grabber'
test_url = File.dirname(__FILE__).split('_',2).last
@@ -14,13 +14,12 @@ def clean_name(name)
puts "graph_scale no\n";
puts "graph_category other\n";
- WebpagetestGrabber::HEADERS.each do |header|
+ PagespeedGrabber::HEADERS.each do |header|
puts "#{clean_name(header)}.label #{header}\n"
end
else
- initial, repeated = WebpagetestGrabber.fetch(test_url)
+ initial, repeated = PagespeedGrabber.fetch(test_url)
initial.each do |header, value|
puts "#{clean_name(header)}.value #{value}"
end
end
-
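
For reference, munin.rb follows the usual munin plugin contract: called with `config` it prints graph metadata and one `.label` line per header, called without arguments it prints one `.value` line per header. A hypothetical session (the cleaned field names are illustrative, since `clean_name` is not shown in this hunk):

    $ ./webpagetest_google.com config
    ...
    graph_scale no
    graph_category other
    load_time_ms.label Load Time (ms)
    ...

    $ ./webpagetest_google.com
    load_time_ms.value 1234
    ...
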
pagespeed_grabber.gemspec (new file)
@@ -0,0 +1,47 @@
+# Generated by jeweler
+# DO NOT EDIT THIS FILE DIRECTLY
+# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
+# -*- encoding: utf-8 -*-
+
+Gem::Specification.new do |s|
+ s.name = %q{pagespeed_grabber}
+ s.version = "0.0.1"
+
+ s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
+ s.authors = ["Michael Grosser"]
+ s.date = %q{2011-03-16}
+ s.email = %q{michael@grosser.it}
+ s.files = [
+ "Gemfile",
+ "Gemfile.lock",
+ "Rakefile",
+ "Readme.md",
+ "VERSION",
+ "lib/pagespeed_grabber.rb",
+ "munin.rb",
+ "spec/pagespeed_grabber_spec.rb"
+ ]
+ s.homepage = %q{http://github.com/grosser/pagespeed_grabber}
+ s.require_paths = ["lib"]
+ s.rubygems_version = %q{1.4.2}
+ s.summary = %q{Grab pagespeed results from external services to use them in e.g. munin.}
+ s.test_files = [
+ "spec/pagespeed_grabber_spec.rb"
+ ]
+
+ if s.respond_to? :specification_version then
+ s.specification_version = 3
+
+ if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
+ s.add_runtime_dependency(%q<mechanize>, [">= 0"])
+ s.add_runtime_dependency(%q<fastercsv>, [">= 0"])
+ else
+ s.add_dependency(%q<mechanize>, [">= 0"])
+ s.add_dependency(%q<fastercsv>, [">= 0"])
+ end
+ else
+ s.add_dependency(%q<mechanize>, [">= 0"])
+ s.add_dependency(%q<fastercsv>, [">= 0"])
+ end
+end
+
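
The gemspec above is generated, so it should not be edited by hand; per its header, `rake gemspec` rebuilds it from the Jeweler block in the Rakefile. A quick way to sanity-check the generated file from irb, using only stock RubyGems calls (a sketch, not part of the commit):

    require 'rubygems'

    spec = Gem::Specification.load('pagespeed_grabber.gemspec')
    puts spec.full_name   # => "pagespeed_grabber-0.0.1"
    puts spec.summary
    puts spec.files
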
spec/pagespeed_grabber_spec.rb (new file)
@@ -0,0 +1,8 @@
+require File.expand_path('lib/pagespeed_grabber')
+
+describe PagespeedGrabber do
+ it "can fetch results for an url" do
+ initial, repeated = PagespeedGrabber.fetch('google.com')
+ initial['Load Time (ms)'].to_i.should > repeated['Load Time (ms)'].to_i
+ end
+end
spec/webpagetest_grabber_spec.rb (deleted)
@@ -1,8 +0,0 @@
-require File.expand_path('lib/webpagetest_grabber')
-
-describe WebpagetestGrabber do
- it "can fetch results for an url" do
- initial, repeated = WebpagetestGrabber.fetch('google.com')
- initial['Load Time (ms)'].to_i.should > repeated['Load Time (ms)'].to_i
- end
-end
