Skip to content

Commit

Permalink
[RetryMiddleware] Rename the give-up log level parameter and setting (logger_give_up_level → give_up_log_level, RETRY_LOG_GIVE_UP_LEVEL → RETRY_GIVE_UP_LOG_LEVEL)
Browse files — Browse the repository at this point in the history
  • Loading branch information
further-reading committed Oct 13, 2022
1 parent 648770b commit dda9ce4
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 7 deletions.
12 changes: 6 additions & 6 deletions scrapy/downloadermiddlewares/retry.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ def get_retry_request(
max_retry_times: Optional[int] = None,
priority_adjust: Optional[int] = None,
logger: Logger = retry_logger,
logger_give_up_level: int = 40,
give_up_log_level: int = 40,
stats_base_key: str = 'retry',
):
"""
Expand Down Expand Up @@ -83,7 +83,7 @@ def parse(self, response):
*logger* is the logging.Logger object to be used when logging messages
*logger_give_up_level* is the level that should be used for give up logs.
*give_up_log_level* is the level that should be used for give up logs.
40 is the default level for ERROR logs.
*stats_base_key* is a string to be used as the base key for the
Expand Down Expand Up @@ -120,7 +120,7 @@ def parse(self, response):
else:
stats.inc_value(f'{stats_base_key}/max_reached')
logger.log(
logger_give_up_level,
give_up_log_level,
"Gave up retrying %(request)s (failed %(retry_times)d times): "
"%(reason)s",
{'request': request, 'retry_times': retry_times, 'reason': reason},
Expand All @@ -144,7 +144,7 @@ def __init__(self, settings):
self.max_retry_times = settings.getint('RETRY_TIMES')
self.retry_http_codes = set(int(x) for x in settings.getlist('RETRY_HTTP_CODES'))
self.priority_adjust = settings.getint('RETRY_PRIORITY_ADJUST')
self.logger_give_up_level = settings.getint('RETRY_LOG_GIVE_UP_LEVEL')
self.give_up_log_level = settings.getint('RETRY_GIVE_UP_LOG_LEVEL')

@classmethod
def from_crawler(cls, crawler):
Expand All @@ -168,12 +168,12 @@ def process_exception(self, request, exception, spider):
def _retry(self, request, reason, spider):
max_retry_times = request.meta.get('max_retry_times', self.max_retry_times)
priority_adjust = request.meta.get('priority_adjust', self.priority_adjust)
logger_give_up_level = request.meta.get('logger_give_up_level', self.logger_give_up_level)
give_up_log_level = request.meta.get('logger_give_up_level', self.give_up_log_level)
return get_retry_request(
request,
reason=reason,
spider=spider,
max_retry_times=max_retry_times,
priority_adjust=priority_adjust,
logger_give_up_level=logger_give_up_level,
give_up_log_level=give_up_log_level,
)
2 changes: 1 addition & 1 deletion scrapy/settings/default_settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -254,7 +254,7 @@
RETRY_TIMES = 2 # initial response + 2 retries = 3 requests
RETRY_HTTP_CODES = [500, 502, 503, 504, 522, 524, 408, 429]
RETRY_PRIORITY_ADJUST = -1
RETRY_LOG_GIVE_UP_LEVEL = 40
RETRY_GIVE_UP_LOG_LEVEL = 40

ROBOTSTXT_OBEY = False
ROBOTSTXT_PARSER = 'scrapy.robotstxt.ProtegoRobotParser'
Expand Down

0 comments on commit dda9ce4

Please sign in to comment.