Skip to content

Commit

Permalink
Removed some deprecated functions and functionalities (#6116)
Browse files Browse the repository at this point in the history
  • Loading branch information
Chenwei-Niu committed Oct 30, 2023
1 parent 1d81585 commit 1f797d0
Show file tree
Hide file tree
Showing 4 changed files with 1 addition and 87 deletions.
13 changes: 1 addition & 12 deletions scrapy/utils/log.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@

import logging
import sys
import warnings
from logging.config import dictConfig
from types import TracebackType
from typing import TYPE_CHECKING, Any, List, Optional, Tuple, Type, Union, cast
Expand All @@ -11,7 +10,6 @@
from twisted.python.failure import Failure

import scrapy
from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.settings import Settings
from scrapy.utils.versions import scrapy_components_versions

Expand Down Expand Up @@ -232,18 +230,9 @@ def logformatter_adapter(logkws: dict) -> Tuple[int, str, dict]:
and adapts it into a tuple of positional arguments for logger.log calls,
handling backward compatibility as well.
"""
if not {"level", "msg", "args"} <= set(logkws):
warnings.warn("Missing keys in LogFormatter method", ScrapyDeprecationWarning)

if "format" in logkws:
warnings.warn(
"`format` key in LogFormatter methods has been "
"deprecated, use `msg` instead",
ScrapyDeprecationWarning,
)

level = logkws.get("level", logging.INFO)
message = logkws.get("format", logkws.get("msg"))
message = logkws.get("msg") or ""
# NOTE: This also handles 'args' being an empty dict, that case doesn't
# play well in logger.log calls
args = logkws if not logkws.get("args") else logkws["args"]
Expand Down
20 changes: 0 additions & 20 deletions scrapy/utils/response.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@

import scrapy
from scrapy.http.response import Response
from scrapy.utils.decorators import deprecated
from scrapy.utils.python import to_bytes, to_unicode

_baseurl_cache: "WeakKeyDictionary[Response, str]" = WeakKeyDictionary()
Expand Down Expand Up @@ -55,25 +54,6 @@ def response_status_message(status: Union[bytes, float, int, str]) -> str:
return f"{status_int} {to_unicode(message)}"


@deprecated
def response_httprepr(response: Response) -> bytes:
    """Return raw HTTP representation (as bytes) of the given response. This
    is provided only for reference, since it's not the exact stream of bytes
    that was received (that's not exposed by Twisted).
    """
    # Status line: "HTTP/1.1 <status> <reason>\r\n"; unknown status codes
    # get an empty reason phrase from the RESPONSES lookup.
    buf = bytearray(b"HTTP/1.1 ")
    buf += to_bytes(str(response.status))
    buf += b" "
    buf += to_bytes(http.RESPONSES.get(response.status, b""))
    buf += b"\r\n"
    if response.headers:
        # Headers already serialize with a trailing CRLF per line; add the
        # one that separates the header block from the blank line below.
        buf += response.headers.to_string()
        buf += b"\r\n"
    # Blank line terminating the header section, then the body.
    buf += b"\r\n"
    buf += response.body
    return bytes(buf)


def open_in_browser(
response: Union[
"scrapy.http.response.html.HtmlResponse",
Expand Down
23 changes: 0 additions & 23 deletions tests/test_downloadermiddleware_stats.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,8 @@
import warnings
from itertools import product
from unittest import TestCase

from scrapy.downloadermiddlewares.stats import DownloaderStats
from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.http import Request, Response
from scrapy.spiders import Spider
from scrapy.utils.response import response_httprepr
from scrapy.utils.test import get_crawler


Expand Down Expand Up @@ -40,25 +36,6 @@ def test_process_response(self):
self.mw.process_response(self.req, self.res, self.spider)
self.assertStatsEqual("downloader/response_count", 1)

def test_response_len(self):
    """downloader/response_bytes must equal the length of the raw HTTP
    representation for every combination of empty/non-empty body and
    0/1/2 response headers.
    """
    bodies = (b"", b"not_empty")  # empty/notempty body
    header_sets = (
        {},
        {"lang": "en"},
        {"lang": "en", "User-Agent": "scrapy"},
    )  # 0 headers, 1h and 2h
    # Exercise all body/header combinations.
    for resp_body, resp_headers in product(bodies, header_sets):
        response = Response(
            url="scrapytest.org", status=200, body=resp_body, headers=resp_headers
        )
        self.crawler.stats.set_value("downloader/response_bytes", 0)
        self.mw.process_response(self.req, response, self.spider)
        # response_httprepr is deprecated; silence its warning while we
        # use it to compute the expected byte count.
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", ScrapyDeprecationWarning)
            expected_size = len(response_httprepr(response))
        self.assertStatsEqual("downloader/response_bytes", expected_size)

def test_process_exception(self):
self.mw.process_exception(self.req, MyException(), self.spider)
self.assertStatsEqual("downloader/exception_count", 1)
Expand Down
32 changes: 0 additions & 32 deletions tests/test_utils_response.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,13 @@
import unittest
import warnings
from pathlib import Path
from urllib.parse import urlparse

from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.http import HtmlResponse, Response, TextResponse
from scrapy.utils.python import to_bytes
from scrapy.utils.response import (
get_base_url,
get_meta_refresh,
open_in_browser,
response_httprepr,
response_status_message,
)

Expand All @@ -20,35 +17,6 @@
class ResponseUtilsTest(unittest.TestCase):
dummy_response = TextResponse(url="http://example.org/", body=b"dummy_response")

def test_response_httprepr(self):
    """response_httprepr() renders the status line, headers and body;
    unknown status codes (e.g. 6666) get an empty reason phrase. The
    function's deprecation warning is silenced for the whole test.
    """
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", ScrapyDeprecationWarning)

        # (response, expected raw representation) pairs.
        cases = [
            (
                Response("http://www.example.com"),
                b"HTTP/1.1 200 OK\r\n\r\n",
            ),
            (
                Response(
                    "http://www.example.com",
                    status=404,
                    headers={"Content-type": "text/html"},
                    body=b"Some body",
                ),
                b"HTTP/1.1 404 Not Found\r\nContent-Type: text/html\r\n\r\nSome body",
            ),
            (
                Response(
                    "http://www.example.com",
                    status=6666,
                    headers={"Content-type": "text/html"},
                    body=b"Some body",
                ),
                b"HTTP/1.1 6666 \r\nContent-Type: text/html\r\n\r\nSome body",
            ),
        ]
        for response, expected in cases:
            self.assertEqual(response_httprepr(response), expected)

def test_open_in_browser(self):
url = "http:///www.example.com/some/page.html"
body = b"<html> <head> <title>test page</title> </head> <body>test body</body> </html>"
Expand Down

0 comments on commit 1f797d0

Please sign in to comment.