Skip to content

Commit

Permalink
Add typing hint to httpcache downloadermiddlewares (scrapy#4243)
Browse files Browse the repository at this point in the history
  • Loading branch information
grammy-jiang committed Aug 17, 2020
1 parent a8e08d5 commit 55edf8d
Showing 1 changed file with 29 additions and 12 deletions.
41 changes: 29 additions & 12 deletions scrapy/downloadermiddlewares/httpcache.py
@@ -1,4 +1,5 @@
from email.utils import formatdate
from typing import Optional, Type, TypeVar

from twisted.internet import defer
from twisted.internet.error import (
Expand All @@ -13,18 +14,27 @@
from twisted.web.client import ResponseFailed

from scrapy import signals
from scrapy.crawler import Crawler
from scrapy.exceptions import IgnoreRequest, NotConfigured
from scrapy.http.request import Request
from scrapy.http.response import Response
from scrapy.settings import Settings
from scrapy.spiders import Spider
from scrapy.statscollectors import StatsCollector
from scrapy.utils.misc import load_object


HttpCacheMiddlewareTV = TypeVar("HttpCacheMiddlewareTV", bound="HttpCacheMiddleware")


class HttpCacheMiddleware:
    """Downloader middleware that serves and stores responses via an HTTP cache."""

    # Download failures considered recoverable: when one of these is raised and
    # a cached copy of the response exists, process_exception returns the cached
    # response instead of letting the error propagate.
    DOWNLOAD_EXCEPTIONS = (defer.TimeoutError, TimeoutError, DNSLookupError,
                           ConnectionRefusedError, ConnectionDone, ConnectError,
                           ConnectionLost, TCPTimedOutError, ResponseFailed,
                           IOError)

def __init__(self, settings, stats):
def __init__(self, settings: Settings, stats: StatsCollector) -> None:
if not settings.getbool('HTTPCACHE_ENABLED'):
raise NotConfigured
self.policy = load_object(settings['HTTPCACHE_POLICY'])(settings)
Expand All @@ -33,26 +43,26 @@ def __init__(self, settings, stats):
self.stats = stats

@classmethod
def from_crawler(cls, crawler):
def from_crawler(cls: Type[HttpCacheMiddlewareTV], crawler: Crawler) -> HttpCacheMiddlewareTV:
o = cls(crawler.settings, crawler.stats)
crawler.signals.connect(o.spider_opened, signal=signals.spider_opened)
crawler.signals.connect(o.spider_closed, signal=signals.spider_closed)
return o

def spider_opened(self, spider):
def spider_opened(self, spider: Spider) -> None:
self.storage.open_spider(spider)

def spider_closed(self, spider):
def spider_closed(self, spider: Spider) -> None:
self.storage.close_spider(spider)

def process_request(self, request, spider):
def process_request(self, request: Request, spider: Spider) -> Optional[Response]:
if request.meta.get('dont_cache', False):
return
return None

# Skip uncacheable requests
if not self.policy.should_cache_request(request):
request.meta['_dont_cache'] = True # flag as uncacheable
return
return None

# Look for cached response and check if expired
cachedresponse = self.storage.retrieve_response(spider, request)
Expand All @@ -61,7 +71,7 @@ def process_request(self, request, spider):
if self.ignore_missing:
self.stats.inc_value('httpcache/ignore', spider=spider)
raise IgnoreRequest("Ignored request not in cache: %s" % request)
return # first time request
return None # first time request

# Return cached response only if not expired
cachedresponse.flags.append('cached')
Expand All @@ -73,7 +83,9 @@ def process_request(self, request, spider):
# process_response hook
request.meta['cached_response'] = cachedresponse

def process_response(self, request, response, spider):
return None

def process_response(self, request: Request, response: Response, spider: Spider) -> Response:
if request.meta.get('dont_cache', False):
return response

Expand All @@ -85,7 +97,7 @@ def process_response(self, request, response, spider):
# RFC2616 requires origin server to set Date header,
# https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.18
if 'Date' not in response.headers:
response.headers['Date'] = formatdate(usegmt=1)
response.headers['Date'] = formatdate(usegmt=True)

# Do not validate first-hand responses
cachedresponse = request.meta.pop('cached_response', None)
Expand All @@ -102,13 +114,18 @@ def process_response(self, request, response, spider):
self._cache_response(spider, response, request, cachedresponse)
return response

def process_exception(self, request, exception, spider):
def process_exception(
self, request: Request, exception: Exception, spider: Spider
) -> Optional[Response]:
cachedresponse = request.meta.pop('cached_response', None)
if cachedresponse is not None and isinstance(exception, self.DOWNLOAD_EXCEPTIONS):
self.stats.inc_value('httpcache/errorrecovery', spider=spider)
return cachedresponse
return None

def _cache_response(self, spider, response, request, cachedresponse):
def _cache_response(
self, spider: Spider, response: Response, request: Request, cachedresponse: Optional[Response]
) -> None:
if self.policy.should_cache_response(response, request):
self.stats.inc_value('httpcache/store', spider=spider)
self.storage.store_response(spider, request, response)
Expand Down

0 comments on commit 55edf8d

Please sign in to comment.