
OMG, whitespace cleanup!

1 parent 2811f97 · commit fc913bfffea817feb97354a028706ef146505bcb · @jonallured committed Nov 13, 2013
Showing 38 changed files with 191 additions and 192 deletions.
  1. +0 −1 Guardfile
  2. +1 −1 README.md
  3. +1 −1 Rakefile
  4. +1 −1 feedzirra.gemspec
  5. +1 −1 lib/feedzirra.rb
  6. +1 −1 lib/feedzirra/core_ext/date.rb
  7. +2 −2 lib/feedzirra/core_ext/string.rb
  8. +16 −16 lib/feedzirra/feed.rb
  9. +1 −1 lib/feedzirra/parser.rb
  10. +2 −2 lib/feedzirra/parser/atom_entry.rb
  11. +1 −1 lib/feedzirra/parser/atom_feed_burner.rb
  12. +2 −2 lib/feedzirra/parser/atom_feed_burner_entry.rb
  13. +1 −1 lib/feedzirra/parser/google_docs_atom_entry.rb
  14. +2 −2 lib/feedzirra/parser/itunes_rss_item.rb
  15. +3 −3 lib/feedzirra/parser/itunes_rss_owner.rb
  16. +1 −1 lib/feedzirra/parser/rss.rb
  17. +6 −6 lib/feedzirra/parser/rss_entry.rb
  18. +1 −1 lib/feedzirra/parser/rss_feed_burner.rb
  19. +1 −1 lib/feedzirra/parser/rss_feed_burner_entry.rb
  20. +7 −7 spec/benchmarks/feed_benchmarks.rb
  21. +1 −1 spec/benchmarks/feedzirra_benchmarks.rb
  22. +3 −3 spec/benchmarks/fetching_benchmarks.rb
  23. +1 −1 spec/benchmarks/parsing_benchmark.rb
  24. +6 −6 spec/benchmarks/updating_benchmarks.rb
  25. +7 −7 spec/feedzirra/feed_entry_utilities_spec.rb
  26. +35 −35 spec/feedzirra/feed_spec.rb
  27. +22 −22 spec/feedzirra/feed_utilities_spec.rb
  28. +1 −1 spec/feedzirra/parser/atom_entry_spec.rb
  29. +7 −7 spec/feedzirra/parser/atom_feed_burner_entry_spec.rb
  30. +7 −7 spec/feedzirra/parser/atom_feed_burner_spec.rb
  31. +9 −9 spec/feedzirra/parser/atom_spec.rb
  32. +11 −11 spec/feedzirra/parser/itunes_rss_item_spec.rb
  33. +4 −4 spec/feedzirra/parser/itunes_rss_owner_spec.rb
  34. +8 −8 spec/feedzirra/parser/itunes_rss_spec.rb
  35. +1 −1 spec/feedzirra/parser/rss_feed_burner_entry_spec.rb
  36. +8 −8 spec/feedzirra/parser/rss_feed_burner_spec.rb
  37. +6 −6 spec/feedzirra/parser/rss_spec.rb
  38. +3 −3 spec/spec_helper.rb
Guardfile
@@ -3,4 +3,3 @@ guard 'rspec' do
watch(%r{^lib/(.+)\.rb$}) { |m| "spec/#{m[1]}_spec.rb" }
watch('spec/spec_helper.rb') { "spec" }
end
-
README.md
@@ -176,4 +176,4 @@ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Rakefile
@@ -3,4 +3,4 @@ require "bundler/gem_tasks"
require 'rspec/core/rake_task'
RSpec::Core::RakeTask.new(:spec)
task :test => :spec
-task :default => :test
+task :default => :test
feedzirra.gemspec
@@ -16,7 +16,7 @@ Gem::Specification.new do |s|
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.require_paths = ['lib']
-
+
s.platform = Gem::Platform::RUBY
s.add_dependency 'nokogiri', '~> 1.6.0'
lib/feedzirra.rb
@@ -12,6 +12,6 @@ module Feedzirra
autoload :FeedUtilities, 'feedzirra/feed_utilities'
autoload :Feed, 'feedzirra/feed'
autoload :Parser, 'feedzirra/parser'
-
+
class NoParserAvailable < StandardError; end
end
lib/feedzirra/core_ext/date.rb
@@ -16,4 +16,4 @@ def feed_utils_to_time(dest, method)
Time.send(method, dest.year, dest.month, dest.day, dest.hour, dest.min,
dest.sec, dest.zone)
end
-end
+end
lib/feedzirra/core_ext/string.rb
@@ -2,8 +2,8 @@ class String
def sanitize!
self.replace(sanitize)
end
-
+
def sanitize
Loofah.scrub_fragment(self, :prune).to_s
end
-end
+end
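For reference, the core extension above delegates to Loofah's :prune scrubber; a minimal sketch of its effect (the markup and the shown output are illustrative, and assume requiring feedzirra loads this extension):

    require 'feedzirra'

    html = %(<p>Hello <script>alert('x')</script>world</p>)
    html.sanitize   # => roughly "<p>Hello world</p>" (unsafe nodes pruned)
    html.sanitize!  # same scrub, but replaces the receiver in place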
lib/feedzirra/feed.rb
@@ -1,7 +1,7 @@
-module Feedzirra
+module Feedzirra
class Feed
USER_AGENT = "feedzirra http://github.com/pauldix/feedzirra/tree/master"
-
+
# Takes a raw XML feed and attempts to parse it. If no parser is available a Feedzirra::NoParserAvailable exception is raised.
# You can pass a block to be called when there's an error during the parsing.
# === Parameters
@@ -19,7 +19,7 @@ def self.parse(xml, &block)
end
# Determines the correct parser class to use for parsing the feed.
- #
+ #
# === Parameters
# [xml<String>] The XML that you would like determine the parser for.
# === Returns
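For context, the parse API documented in the hunks above is used along these lines (a minimal sketch; the file path is a placeholder, not part of this commit):

    require 'feedzirra'

    xml  = File.read('feed.xml') # placeholder path
    feed = Feedzirra::Feed.parse(xml) do |message|
      warn "parsing error: #{message}" # optional block, called on errors during parsing
    end
    # Feedzirra::Feed.determine_feed_parser_for_xml(xml) returns the parser class
    # that would handle this XML; parse raises Feedzirra::NoParserAvailable when
    # no registered parser matches.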
@@ -35,7 +35,7 @@ def self.determine_feed_parser_for_xml(xml)
# [klass<Constant>] The class/constant that you want to register.
# === Returns
# A updated array of feed parser class names.
- def self.add_feed_class(klass)
+ def self.add_feed_class(klass)
feed_classes.unshift klass
end
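A hedged sketch of the add_feed_class call documented above; CustomAtom is a hypothetical class used only to illustrate the registration:

    require 'feedzirra'

    class CustomAtom
      # a real parser would mix in SAXMachine and declare elements, as in the
      # parser files later in this diff; able_to_parse? is the selection hook
      def self.able_to_parse?(xml) #:nodoc:
        xml.include?('<custom-atom')
      end
    end

    Feedzirra::Feed.add_feed_class(CustomAtom)
    Feedzirra::Feed.feed_classes.first # => CustomAtom, since add_feed_class unshifts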
@@ -46,7 +46,7 @@ def self.add_feed_class(klass)
def self.feed_classes
@feed_classes ||= [Feedzirra::Parser::RSSFeedBurner, Feedzirra::Parser::GoogleDocsAtom, Feedzirra::Parser::AtomFeedBurner, Feedzirra::Parser::Atom, Feedzirra::Parser::ITunesRSS, Feedzirra::Parser::RSS]
end
-
+
# Makes all registered feeds types look for the passed in element to parse.
# This is actually just a call to element (a SAXMachine call) in the class.
#
@@ -80,7 +80,7 @@ def self.add_common_feed_elements(element_tag, options = {})
def self.add_common_feed_entry_element(element_tag, options = {})
call_on_each_feed_entry :element, element_tag, options
end
-
+
# Makes all registered entry types look for the passed in elements to parse.
# This is actually just a call to element (a SAXMachine call) in the class.
#
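As an illustration of add_common_feed_entry_element above, registering an extra element on every entry type might look like this (the itunes:duration mapping and the file path are hypothetical, not something this commit adds):

    require 'feedzirra'

    Feedzirra::Feed.add_common_feed_entry_element(:"itunes:duration", :as => :duration)

    feed = Feedzirra::Feed.parse(File.read('podcast.xml')) # placeholder path
    feed.entries.first.duration # assumes the usual entries accessor on parsed feeds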
@@ -144,7 +144,7 @@ def self.setup_easy curl, options
# * all parameters defined in setup_easy
# === Returns
# A String of XML if a single URL is passed.
- #
+ #
# A Hash if multiple URL's are passed. The key will be the URL, and the value the XML.
def self.fetch_raw(urls, options = {})
url_queue = [*urls]
@@ -189,13 +189,13 @@ def self.fetch_and_parse(urls, options = {})
url_queue = [*urls]
multi = Curl::Multi.new
responses = {}
-
+
# I broke these down so I would only try to do 30 simultaneously because
# I was getting weird errors when doing a lot. As one finishes it pops another off the queue.
url_queue.slice!(0, 30).each do |url|
add_url_to_multi(multi, url, url_queue, responses, options)
end
-
+
multi.perform
return urls.is_a?(String) ? responses.values.first : responses
end
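A short sketch of the single-URL versus multi-URL behavior visible in the hunk above (the URLs are placeholders):

    require 'feedzirra'

    # a single URL returns the parsed feed itself
    feed = Feedzirra::Feed.fetch_and_parse('http://example.com/feed.xml')

    # an Array of URLs (fetched up to 30 at a time, as above) returns a Hash keyed by URL
    feeds = Feedzirra::Feed.fetch_and_parse(['http://example.com/a.xml',
                                             'http://example.com/b.xml'])
    feeds.each { |url, parsed| puts "#{url} -> #{parsed.class}" }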
@@ -212,7 +212,7 @@ def self.decode_content(c)
gz = Zlib::GzipReader.new(StringIO.new(c.body_str))
xml = gz.read
gz.close
- rescue Zlib::GzipFile::Error
+ rescue Zlib::GzipFile::Error
# Maybe this is not gzipped?
xml = c.body_str
end
@@ -241,15 +241,15 @@ def self.update(feeds, options = {})
feed_queue = [*feeds]
multi = Curl::Multi.new
responses = {}
-
+
feed_queue.slice!(0, 30).each do |feed|
add_feed_to_multi(multi, feed, feed_queue, responses, options)
end
-
+
multi.perform
feeds.is_a?(Array) ? responses : responses.values.first
end
-
+
# An abstraction for adding a feed by URL to the passed Curb::multi stack.
#
# === Parameters
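The update method in the hunk above follows the same single-versus-array convention; a hedged usage sketch (the URL is a placeholder):

    require 'feedzirra'

    feed = Feedzirra::Feed.fetch_and_parse('http://example.com/feed.xml')
    # ...later...
    updated = Feedzirra::Feed.update(feed)   # single feed in, a single result back
    results = Feedzirra::Feed.update([feed]) # an Array in, the collected responses Hash back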
@@ -274,7 +274,7 @@ def self.add_url_to_multi(multi, url, url_queue, responses, options)
add_url_to_multi(multi, url_queue.shift, url_queue, responses, options) unless url_queue.empty?
xml = decode_content(c)
klass = determine_feed_parser_for_xml(xml)
-
+
if klass
begin
feed = klass.parse(xml, Proc.new{|message| warn "Error while parsing [#{url}] #{message}" })
@@ -332,7 +332,7 @@ def self.add_url_to_multi(multi, url, url_queue, responses, options)
# * all parameters defined in setup_easy
# === Returns
# The updated Curl::Multi object with the request details added to it's stack.
- def self.add_feed_to_multi(multi, feed, feed_queue, responses, options)
+ def self.add_feed_to_multi(multi, feed, feed_queue, responses, options)
easy = Curl::Easy.new(feed.feed_url) do |curl|
setup_easy curl, options
curl.headers["If-Modified-Since"] = feed.last_modified.httpdate if feed.last_modified
@@ -370,7 +370,7 @@ def self.add_feed_to_multi(multi, feed, feed_queue, responses, options)
end
# Determines the etag from the request headers.
- #
+ #
# === Parameters
# [header<String>] Raw request header returned from the request
# === Returns
lib/feedzirra/parser.rb
@@ -4,7 +4,7 @@ module Parser
autoload :RSSEntry, 'feedzirra/parser/rss_entry'
autoload :RSSFeedBurner, 'feedzirra/parser/rss_feed_burner'
autoload :RSSFeedBurnerEntry, 'feedzirra/parser/rss_feed_burner_entry'
-
+
autoload :ITunesRSS, 'feedzirra/parser/itunes_rss'
autoload :ITunesRSSItem, 'feedzirra/parser/itunes_rss_item'
autoload :ITunesRSSOwner, 'feedzirra/parser/itunes_rss_owner'
lib/feedzirra/parser/atom_entry.rb
@@ -5,7 +5,7 @@ module Parser
class AtomEntry
include SAXMachine
include FeedEntryUtilities
-
+
element :title
element :link, :as => :url, :value => :href, :with => {:type => "text/html", :rel => "alternate"}
element :name, :as => :author
@@ -31,4 +31,4 @@ def url
end
-end
+end
lib/feedzirra/parser/atom_feed_burner.rb
@@ -18,4 +18,4 @@ def self.able_to_parse?(xml) #:nodoc:
end
-end
+end
lib/feedzirra/parser/atom_feed_burner_entry.rb
@@ -5,7 +5,7 @@ module Parser
class AtomFeedBurnerEntry
include SAXMachine
include FeedEntryUtilities
-
+
element :title
element :name, :as => :author
element :link, :as => :url, :value => :href, :with => {:type => "text/html", :rel => "alternate"}
@@ -32,4 +32,4 @@ def url
end
end
-end
+end
lib/feedzirra/parser/google_docs_atom_entry.rb
@@ -3,7 +3,7 @@ module Parser
class GoogleDocsAtomEntry
include SAXMachine
include FeedEntryUtilities
-
+
element :title
element :link, :as => :url, :value => :href, :with => {:type => "text/html", :rel => "alternate"}
element :name, :as => :author
lib/feedzirra/parser/itunes_rss_item.rb
@@ -1,5 +1,5 @@
module Feedzirra
-
+
module Parser
# iTunes extensions to the standard RSS2.0 item
# Source: http://www.apple.com/itunes/whatson/podcasts/specs.html
@@ -28,5 +28,5 @@ class ITunesRSSItem
element :enclosure, :value => :url, :as => :enclosure_url
end
end
-
+
end
lib/feedzirra/parser/itunes_rss_owner.rb
@@ -1,5 +1,5 @@
module Feedzirra
-
+
module Parser
class ITunesRSSOwner
include SAXMachine
@@ -8,5 +8,5 @@ class ITunesRSSOwner
element :"itunes:email", :as => :email
end
end
-
-end
+
+end
lib/feedzirra/parser/rss.rb
@@ -19,4 +19,4 @@ def self.able_to_parse?(xml) #:nodoc:
end
-end
+end
lib/feedzirra/parser/rss_entry.rb
@@ -8,27 +8,27 @@ class RSSEntry
element :title
element :link, :as => :url
-
+
element :"dc:creator", :as => :author
element :author, :as => :author
element :"content:encoded", :as => :content
element :description, :as => :summary
element :"media:content", :as => :image, :value => :url
element :enclosure, :as => :image, :value => :url
-
+
element :pubDate, :as => :published
element :pubdate, :as => :published
element :"dc:date", :as => :published
element :"dc:Date", :as => :published
element :"dcterms:created", :as => :published
-
-
+
+
element :"dcterms:modified", :as => :updated
element :issued, :as => :published
elements :category, :as => :categories
-
- element :guid, :as => :entry_id
+
+ element :guid, :as => :entry_id
end
end
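Given the SAXMachine mappings above, attributes on a parsed RSS entry are read directly; a minimal sketch (the file path and the entries accessor are assumptions, not part of this commit):

    require 'feedzirra'

    entry = Feedzirra::Feed.parse(File.read('some_feed.rss')).entries.first
    entry.title       # <title>
    entry.url         # <link>
    entry.author      # <dc:creator> or <author>
    entry.content     # <content:encoded>
    entry.summary     # <description>
    entry.published   # <pubDate>, <dc:date>, <dcterms:created>, or <issued>
    entry.categories  # <category> elements, collected into an Array
    entry.entry_id    # <guid>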
lib/feedzirra/parser/rss_feed_burner.rb
@@ -19,4 +19,4 @@ def self.able_to_parse?(xml) #:nodoc:
end
-end
+end
lib/feedzirra/parser/rss_feed_burner_entry.rb
@@ -35,7 +35,7 @@ class RSSFeedBurnerEntry
def url
@url || @link
end
-
+
end
end
spec/benchmarks/feed_benchmarks.rb
@@ -44,22 +44,22 @@
# puts res.slice(0, 500)
end
end
-
+
require 'rfuzz/session'
- include RFuzz
+ include RFuzz
t.report("rfuzz") do
GET_COUNT.times do
http = HttpClient.new("www.pauldix.net", 80)
response = http.get("/")
- if response.http_status != "200"
+ if response.http_status != "200"
puts "***** #{response.http_status}"
else
# puts response.http_status
# puts response.http_body.slice(0, 500)
end
end
end
-
+
require 'eventmachine'
t.report("eventmachine") do
counter = GET_COUNT
@@ -76,8 +76,8 @@
end
end
end
-
-
+
+
require 'curl-multi'
t.report("curl multi") do
multi = Curl::Multi.new
@@ -95,4 +95,4 @@
multi.select([], []) while multi.size > 0
end
-end
+end
spec/benchmarks/feedzirra_benchmarks.rb
@@ -10,7 +10,7 @@
urls = File.readlines(File.dirname(__FILE__) + "/../sample_feeds/successful_feed_urls.txt").slice(0, 20)
puts "benchmarks on #{urls.size} feeds"
puts "************************************"
-benchmark do |t|
+benchmark do |t|
t.report("feedzirra") do
iterations.times do
Feedzirra::Feed.fetch_and_parse(urls, :on_success => lambda { |url, feed| $stdout.print '.'; $stdout.flush })
spec/benchmarks/fetching_benchmarks.rb
@@ -10,7 +10,7 @@
urls = File.readlines(File.dirname(__FILE__) + "/../sample_feeds/successful_feed_urls.txt").slice(0, 20)
puts "benchmarks on #{urls.size} feeds"
puts "************************************"
-benchmark do |t|
+benchmark do |t|
t.report("feedzirra open uri") do
iterations.times do
urls.each do |url|
@@ -24,5 +24,5 @@
iterations.times do
Feedzirra::Feed.fetch_and_parse(urls, :on_success => lambda { |url, feed| $stdout.print '.'; $stdout.flush })
end
- end
-end
+ end
+end
(Diffs for the remaining 16 files under spec/ are not shown.)
