From 554ee7173403464a2581d87da28e9012ad2e1722 Mon Sep 17 00:00:00 2001
From: Yohanna Lisnichuk
Date: Mon, 30 May 2022 15:50:42 -0400
Subject: [PATCH] test: fix test to use max_attempts and Retry-After

---
 kingfisher_scrapy/spiders/portugal_base.py | 4 ++--
 tests/test_spiders.py                      | 7 ++++---
 2 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/kingfisher_scrapy/spiders/portugal_base.py b/kingfisher_scrapy/spiders/portugal_base.py
index 66c7c9c39..78d1e01e2 100644
--- a/kingfisher_scrapy/spiders/portugal_base.py
+++ b/kingfisher_scrapy/spiders/portugal_base.py
@@ -13,7 +13,7 @@ class PortugalBase(LinksSpider):
 
     # Local
     # We will wait 1, 2, 4, 8, 16 minutes (31 minutes total).
-    max_retries = 5
+    max_attempts = 5
     initial_wait_time = 60
 
     # start_url must be provided by subclasses.
@@ -35,4 +35,4 @@ def parse(self, response):
         if self.is_http_success(response) or response.status == 404:
             yield from super().parse(response)
         else:
-            yield self.build_retry_request_or_file_error(response, wait_time, self.max_retries, True)
+            yield self.build_retry_request_or_file_error(response, wait_time, self.max_attempts, True)
diff --git a/tests/test_spiders.py b/tests/test_spiders.py
index b0861a648..0c06b91a4 100644
--- a/tests/test_spiders.py
+++ b/tests/test_spiders.py
@@ -31,9 +31,10 @@ def test_start_requests_http_error(spider_name):
     callback = request.callback or spider.parse
     response = Response('http://example.com', status=555, request=request)
 
-    # If `max_retries` is set, the spider handles (and retries) error responses.
-    if hasattr(spider, 'max_retries'):
-        response.request.meta['retries'] = spider.max_retries
+    # If `max_attempts` is set, the spider handles (and retries) error responses.
+    if hasattr(spider, 'max_attempts'):
+        response.request.meta['retries'] = spider.max_attempts
+        response.headers['Retry-After'] = 1
 
     items = list(callback(response))
     assert len(items) == 1