Merge pull request #5 from kalessin/setting

replace the hubproxy job tag with a setting (the tag was only needed before we had per-spider settings in the panel)
commit 023d642c0adb4c2bc98a231308ab7cb3af357f49 (2 parents: dc239e3 + 24e8835)
authored by @dangra
Showing with 3 additions and 5 deletions.
  1. +1 −2  scrapylib/hubproxy.py
  2. +2 −3 scrapylib/tests/test_hubproxy.py
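The change swaps the old 'hubproxy' entry in SHUB_JOB_TAGS for a plain boolean HUBPROXY_ENABLED setting. Below is a minimal sketch of enabling the middleware under the new scheme in a project's settings.py; the DOWNLOADER_MIDDLEWARES priority value is an assumption, only HUBPROXY_ENABLED itself comes from this change.

# settings.py -- sketch only; the priority value (743) is an assumption.
DOWNLOADER_MIDDLEWARES = {
    'scrapylib.hubproxy.HubProxyMiddleware': 743,
}

# Previously the middleware was switched on by adding 'hubproxy' to
# SHUB_JOB_TAGS; after this change a boolean setting is used instead.
HUBPROXY_ENABLED = True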
scrapylib/hubproxy.py  (3 changed lines: +1 −2)
@@ -2,7 +2,6 @@
 from scrapy.xlib.pydispatch import dispatcher
 from scrapy import log, signals
 
-
 class HubProxyMiddleware(object):
 
     url = 'http://proxy.scrapinghub.com:8010'
@@ -35,7 +34,7 @@ def open_spider(self, spider):
     def is_enabled(self, spider):
         """Hook to enable middleware by custom rules"""
         return getattr(spider, 'use_hubproxy', False) \
-            or 'hubproxy' in self.crawler.settings.getlist('SHUB_JOB_TAGS')
+            or self.crawler.settings.getbool("HUBPROXY_ENABLED")
 
     def get_proxyauth(self, spider):
         """Hook to compute Proxy-Authorization header by custom rules"""
scrapylib/tests/test_hubproxy.py  (5 changed lines: +2 −3)
@@ -86,10 +86,9 @@ def test_spider_use_hubproxy(self):
         self.spider.use_hubproxy = False
         self._assert_disabled(self.spider, self.settings)
 
-    def test_shub_job_tags(self):
-        self.settings['SHUB_JOB_TAGS'] = 'other'
+    def test_enabled(self):
         self._assert_disabled(self.spider, self.settings)
-        self.settings['SHUB_JOB_TAGS'] = 'hubproxy'
+        self.settings['HUBPROXY_ENABLED'] = True
         self._assert_enabled(self.spider, self.settings)
 
     def test_userpass(self):
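Besides the project-wide setting exercised by test_enabled, the use_hubproxy spider attribute (covered by test_spider_use_hubproxy above) still works as a per-spider opt-in. A small sketch, assuming a Scrapy version where spiders subclass scrapy.Spider; the spider name and start_urls are placeholders.

import scrapy

class ProxiedSpider(scrapy.Spider):
    name = 'proxied_example'             # placeholder name
    start_urls = ['http://example.com']  # placeholder URL

    # Per-spider opt-in checked by HubProxyMiddleware.is_enabled(),
    # independent of the project-wide HUBPROXY_ENABLED setting.
    use_hubproxy = True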