
Commit

fixed proxy bug
scrubber committed Feb 15, 2009
1 parent 6f13698 commit 8089917
Showing 2 changed files with 5 additions and 4 deletions.
7 changes: 4 additions & 3 deletions lib/scrubyt/core/navigation/agents/mechanize.rb
@@ -123,15 +123,16 @@ def self.store_host_name(doc_url)
       @@original_host_name ||= @@host_name
     end #end of method store_host_name

-    def self.parse_and_set_proxy(proxy)
+    def self.parse_and_set_proxy(proxy)
+      @@proxy_user = @@proxy_pass = nil
       if proxy.downcase == 'localhost'
         @@host = 'localhost'
         @@port = proxy.split(':').last
       else
         parts = proxy.split(':')
         if (parts.size > 2)
           user_pass = parts[0].split('@')
-          if (user.pass.size > 1)
+          if (user_pass.size > 1)
             @@proxy_user = user_pass[0]
             @@proxy_pass = user_pass[1]
           else
@@ -151,7 +152,7 @@ def self.parse_and_set_proxy(proxy)
         end
       end
       Scrubyt.log :ACTION, "[ACTION] Setting proxy: host=<#{@@host}>, port=<#{@@port}>, username=<#{@@proxy_user}, password=<#{@@proxy_pass}>"
-      @@agent.set_proxy(@@host, @@port)
+      @@agent.set_proxy(@@host, @@port, @@proxy_user, @@proxy_pass)
     end

     def self.determine_protocol
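The mechanize.rb change above fixes two related problems: the user.pass typo (an undefined variable) that broke parsing of authenticated proxy strings, and the set_proxy call, which parsed the proxy credentials but never passed them on to Mechanize. A minimal sketch of what the corrected call amounts to, assuming the 2009-era Mechanize API (WWW::Mechanize) and using a hypothetical host, port, and credentials; this is illustrative, not scrubyt code:

# A minimal sketch, assuming WWW::Mechanize#set_proxy accepts optional
# user and password arguments. Host, port, and credentials are hypothetical.
require 'rubygems'
require 'mechanize'

agent = WWW::Mechanize.new

host = 'proxy.example.com'   # hypothetical proxy host
port = 8080                  # hypothetical proxy port
user = 'scraper'             # hypothetical proxy username
pass = 'secret'              # hypothetical proxy password

# Before this commit scrubyt effectively called:
#   agent.set_proxy(host, port)
# dropping the parsed credentials; after the fix it forwards them:
agent.set_proxy(host, port, user, pass)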
2 changes: 1 addition & 1 deletion scrubyt.gemspec
@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
   s.name = %q{scrubyt}
   s.summary = "A powerful Web-scraping framework built on Mechanize and Hpricot (and FireWatir)"
-  s.version = "0.4.13"
+  s.version = "0.4.14"
   s.authors = ["Peter Szinek", "Glenn Gillen"]
   s.date = %q{2009-01-31}
   s.description = %q{scRUBYt! is an easy to learn and use, yet powerful and effective web scraping framework. It's most interesting part is a Web-scraping DSL built on HPricot and WWW::Mechanize, which allows to navigate to the page of interest, then extract and query data records with a few lines of code. It is hard to describe scRUBYt! in a few sentences - you have to see it for yourself!}
