Fix pycodestyle E2XX (whitespace) (#4468)
elacuesta committed Apr 15, 2020
1 parent 39b01b6 commit 94d7ad7
Showing 13 changed files with 269 additions and 242 deletions.
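Every code dropped from the flake8-ignore lists below is in pycodestyle's E2 ("whitespace") class. As a rough illustrative sketch (not code from this commit), here is each fixed code with the compliant form as live Python:

    # E201/E202: whitespace just inside brackets, e.g.  print( 'hi' )
    print('hi')

    # E241: multiple spaces after a comma (or a colon), usually alignment
    # padding, e.g.  methods = {'tls': 1,      'ssl': 2}
    methods = {'tls': 1, 'ssl': 2}

    # E265: block comment must start with '# ', e.g.  #cookies from hosts
    # E266: block comment must not start with '##', e.g.  ### Overridable Interface
    # This comment satisfies both rules.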
30 changes: 15 additions & 15 deletions pytest.ini
@@ -54,7 +54,7 @@ flake8-ignore =
     scrapy/core/downloader/__init__.py E501
     scrapy/core/downloader/contextfactory.py E501 E128 E126
     scrapy/core/downloader/middleware.py E501
-    scrapy/core/downloader/tls.py E501 E241
+    scrapy/core/downloader/tls.py E501
     scrapy/core/downloader/webclient.py E731 E501 E128 E126
     scrapy/core/downloader/handlers/__init__.py E501
     scrapy/core/downloader/handlers/ftp.py E501 E128 E127
@@ -97,9 +97,9 @@ flake8-ignore =
     scrapy/loader/processors.py E501
     # scrapy/pipelines
     scrapy/pipelines/__init__.py E501
-    scrapy/pipelines/files.py E116 E501 E266
-    scrapy/pipelines/images.py E265 E501
-    scrapy/pipelines/media.py E125 E501 E266
+    scrapy/pipelines/files.py E116 E501
+    scrapy/pipelines/images.py E501
+    scrapy/pipelines/media.py E125 E501
     # scrapy/selector
     scrapy/selector/__init__.py F403
     scrapy/selector/unified.py E501 E111
@@ -149,7 +149,7 @@ flake8-ignore =
     scrapy/__init__.py E402 E501
     scrapy/cmdline.py E501
     scrapy/crawler.py E501
-    scrapy/dupefilters.py E501 E202
+    scrapy/dupefilters.py E501
     scrapy/exceptions.py E501
     scrapy/exporters.py E501
     scrapy/interfaces.py E501
@@ -178,13 +178,13 @@ flake8-ignore =
     tests/test_command_shell.py E501 E128
     tests/test_commands.py E128 E501
     tests/test_contracts.py E501 E128
-    tests/test_crawl.py E501 E741 E265
+    tests/test_crawl.py E501 E741
     tests/test_crawler.py F841 E501
     tests/test_dependencies.py F841 E501
-    tests/test_downloader_handlers.py E124 E127 E128 E265 E501 E126 E123
+    tests/test_downloader_handlers.py E124 E127 E128 E501 E126 E123
     tests/test_downloadermiddleware.py E501
     tests/test_downloadermiddleware_ajaxcrawlable.py E501
-    tests/test_downloadermiddleware_cookies.py E731 E741 E501 E128 E265 E126
+    tests/test_downloadermiddleware_cookies.py E731 E741 E501 E128 E126
     tests/test_downloadermiddleware_decompression.py E127
     tests/test_downloadermiddleware_defaultheaders.py E501
     tests/test_downloadermiddleware_downloadtimeout.py E501
@@ -199,15 +199,15 @@
     tests/test_engine.py E401 E501 E128
     tests/test_exporters.py E501 E731 E128 E124
     tests/test_extension_telnet.py F841
-    tests/test_feedexport.py E501 F841 E241
+    tests/test_feedexport.py E501 F841
     tests/test_http_cookies.py E501
     tests/test_http_headers.py E501
     tests/test_http_request.py E402 E501 E127 E128 E128 E126 E123
-    tests/test_http_response.py E501 E128 E265
+    tests/test_http_response.py E501 E128
     tests/test_item.py E128 F841
     tests/test_link.py E501
     tests/test_linkextractors.py E501 E128 E124
-    tests/test_loader.py E501 E731 E741 E128 E117 E241
+    tests/test_loader.py E501 E731 E741 E128 E117
     tests/test_logformatter.py E128 E501 E122
     tests/test_mail.py E128 E501
     tests/test_middleware.py E501 E128
@@ -226,7 +226,7 @@
     tests/test_spidermiddleware_httperror.py E128 E501 E127 E121
     tests/test_spidermiddleware_offsite.py E501 E128 E111
     tests/test_spidermiddleware_output_chain.py E501
-    tests/test_spidermiddleware_referer.py E501 F841 E125 E201 E124 E501 E241 E121
+    tests/test_spidermiddleware_referer.py E501 F841 E125 E124 E501 E121
     tests/test_squeues.py E501 E741
     tests/test_utils_asyncio.py E501
     tests/test_utils_conf.py E501 E128
@@ -235,16 +235,16 @@
     tests/test_utils_defer.py E501 F841
     tests/test_utils_deprecate.py F841 E501
     tests/test_utils_http.py E501 E128 W504
-    tests/test_utils_iterators.py E501 E128 E129 E241
+    tests/test_utils_iterators.py E501 E128 E129
     tests/test_utils_log.py E741
     tests/test_utils_python.py E501 E731
     tests/test_utils_reqser.py E501 E128
     tests/test_utils_request.py E501 E128
     tests/test_utils_response.py E501
     tests/test_utils_signal.py E741 F841 E731
     tests/test_utils_sitemap.py E128 E501 E124
-    tests/test_utils_url.py E501 E127 E125 E501 E241 E126 E123
-    tests/test_webclient.py E501 E128 E122 E402 E241 E123 E126
+    tests/test_utils_url.py E501 E127 E125 E501 E126 E123
+    tests/test_webclient.py E501 E128 E122 E402 E123 E126
     tests/test_cmdline/__init__.py E501
     tests/test_settings/__init__.py E501 E128
     tests/test_spiderloader/__init__.py E128 E501
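A note on the file above: flake8-ignore is pytest-flake8 plugin syntax, in which each continuation line names one file followed by the codes to suppress for it. The commit's pattern is consistent throughout: fix a file's E2xx occurrences (shown in the per-file diffs below), then drop exactly those codes from the file's entry so the checks stay enforced from now on.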
4 changes: 2 additions & 2 deletions scrapy/core/downloader/tls.py
@@ -20,8 +20,8 @@


 openssl_methods = {
-    METHOD_TLS:     SSL.SSLv23_METHOD,               # protocol negotiation (recommended)
-    METHOD_SSLv3:   SSL.SSLv3_METHOD,                # SSL 3 (NOT recommended)
+    METHOD_TLS: SSL.SSLv23_METHOD,                   # protocol negotiation (recommended)
+    METHOD_SSLv3: SSL.SSLv3_METHOD,                  # SSL 3 (NOT recommended)
     METHOD_TLSv10: SSL.TLSv1_METHOD,                 # TLS 1.0 only
     METHOD_TLSv11: getattr(SSL, 'TLSv1_1_METHOD', 5),  # TLS 1.1 only
     METHOD_TLSv12: getattr(SSL, 'TLSv1_2_METHOD', 6),  # TLS 1.2 only
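The tls.py hunk is whitespace-only: E241 flagged the alignment padding after the colons of the first two entries, which is collapsed to a single space; the later entries are unchanged context.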
2 changes: 1 addition & 1 deletion scrapy/dupefilters.py
@@ -61,7 +61,7 @@ def close(self, reason):
     def log(self, request, spider):
         if self.debug:
             msg = "Filtered duplicate request: %(request)s (referer: %(referer)s)"
-            args = {'request': request, 'referer': referer_str(request) }
+            args = {'request': request, 'referer': referer_str(request)}
             self.logger.debug(msg, args, extra={'spider': spider})
         elif self.logdupes:
             msg = ("Filtered duplicate request: %(request)s"
2 changes: 1 addition & 1 deletion scrapy/pipelines/files.py
@@ -500,7 +500,7 @@ def inc_stats(self, spider, status):
         spider.crawler.stats.inc_value('file_count', spider=spider)
         spider.crawler.stats.inc_value('file_status_count/%s' % status, spider=spider)
 
-    ### Overridable Interface
+    # Overridable Interface
     def get_media_requests(self, item, info):
         return [Request(x) for x in item.get(self.files_urls_field, [])]
 
2 changes: 1 addition & 1 deletion scrapy/pipelines/images.py
@@ -14,7 +14,7 @@
 from scrapy.http import Request
 from scrapy.settings import Settings
 from scrapy.exceptions import DropItem
-#TODO: from scrapy.pipelines.media import MediaPipeline
+# TODO: from scrapy.pipelines.media import MediaPipeline
 from scrapy.pipelines.files import FileException, FilesPipeline
 
 
2 changes: 1 addition & 1 deletion scrapy/pipelines/media.py
@@ -166,7 +166,7 @@ def _cache_result_and_execute_waiters(self, result, fp, info):
         for wad in info.waiting.pop(fp):
             defer_result(result).chainDeferred(wad)
 
-    ### Overridable Interface
+    # Overridable Interface
     def media_to_download(self, request, info):
         """Check request before starting download"""
         pass
6 changes: 3 additions & 3 deletions tests/test_crawl.py
@@ -147,9 +147,9 @@ def test_start_requests_lazyness(self):
         settings = {"CONCURRENT_REQUESTS": 1}
         crawler = CrawlerRunner(settings).create_crawler(BrokenStartRequestsSpider)
         yield crawler.crawl(mockserver=self.mockserver)
-        #self.assertTrue(False, crawler.spider.seedsseen)
-        #self.assertTrue(crawler.spider.seedsseen.index(None) < crawler.spider.seedsseen.index(99),
-        #                crawler.spider.seedsseen)
+        self.assertTrue(
+            crawler.spider.seedsseen.index(None) < crawler.spider.seedsseen.index(99),
+            crawler.spider.seedsseen)
 
     @defer.inlineCallbacks
     def test_start_requests_dupes(self):
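This hunk goes slightly beyond whitespace: instead of reformatting the E265-flagged comment markers, it drops the first commented-out assertion and restores the second one as live code.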
4 changes: 2 additions & 2 deletions tests/test_downloadermiddleware_cookies.py
@@ -202,7 +202,7 @@ def test_cookiejar_key(self):
         assert self.mw.process_request(req4, self.spider) is None
         self.assertCookieValEqual(req4.headers.get('Cookie'), b'C2=value2; galleta=dulce')
 
-        #cookies from hosts with port
+        # cookies from hosts with port
         req5_1 = Request('http://scrapytest.org:1104/')
         assert self.mw.process_request(req5_1, self.spider) is None
 
@@ -218,7 +218,7 @@ def test_cookiejar_key(self):
         assert self.mw.process_request(req5_3, self.spider) is None
         self.assertEqual(req5_3.headers.get('Cookie'), b'C1=value1')
 
-        #skip cookie retrieval for not http request
+        # skip cookie retrieval for not http request
         req6 = Request('file:///scrapy/sometempfile')
         assert self.mw.process_request(req6, self.spider) is None
         self.assertEqual(req6.headers.get('Cookie'), None)
4 changes: 2 additions & 2 deletions tests/test_http_response.py
@@ -438,8 +438,8 @@ def test_replace_wrong_encoding(self):
         assert u'<span>value</span>' in r.text, repr(r.text)
 
         # FIXME: This test should pass once we stop using BeautifulSoup's UnicodeDammit in TextResponse
-        #r = self.response_class("http://www.example.com", body=b'PREFIX\xe3\xabSUFFIX')
-        #assert u'\ufffd' in r.text, repr(r.text)
+        # r = self.response_class("http://www.example.com", body=b'PREFIX\xe3\xabSUFFIX')
+        # assert u'\ufffd' in r.text, repr(r.text)
 
     def test_selector(self):
         body = b"<html><head><title>Some page</title><body></body></html>"
(4 more changed files not shown.)
